ngram
listlengths
0
67.8k
[ "= console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged' in result: partition['smart'] = 'Passed'", "Logged' in result: partition['smart'] = 'Passed' else: partition['smart'] = 'Failed' handler.add({ 'partitions': partitions", "src.console import console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler,", "= [] log.info('Starting smart tests.') for partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl", "if 'No Errors Logged' in result: partition['smart'] = 'Passed' else: partition['smart'] = 'Failed'", "@wrappers.stats @wrappers.injector def get(handler, log): partitions = disk_partitions() devices = [] log.info('Starting smart", "disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log): partitions = disk_partitions() devices =", "log.info('Checking smart test results.') for partition in partitions: if partition['device'] in devices: result", "sleep import src.wrappers as wrappers from src.console import console from .partitions import disk_partitions", "for smart test completion (2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart test", "import src.wrappers as wrappers from src.console import console from .partitions import disk_partitions @wrappers.jwt", "smart tests.') for partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device']))", "import console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log):", "= disk_partitions() devices = [] log.info('Starting smart tests.') for partition in partitions: console('smartctl", "disk_partitions() log.info('Checking smart test results.') for partition in partitions: if partition['device'] in devices:", "import sleep import src.wrappers as wrappers 
from src.console import console from .partitions import", "devices.append(partition['device']) log.info('Waiting for smart test completion (2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking", "-t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion (2 minutes).') sleep(120) partitions", "@wrappers.injector def get(handler, log): partitions = disk_partitions() devices = [] log.info('Starting smart tests.')", "{}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion (2 minutes).')", "Errors Logged' in result: partition['smart'] = 'Passed' else: partition['smart'] = 'Failed' handler.add({ 'partitions':", "in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart", "{}'.format(partition['device'])) if 'No Errors Logged' in result: partition['smart'] = 'Passed' else: partition['smart'] =", "short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion (2 minutes).') sleep(120) partitions =", ".partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log): partitions = disk_partitions()", "smart test completion (2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart test results.')", "= disk_partitions() log.info('Checking smart test results.') for partition in partitions: if partition['device'] in", "[] log.info('Starting smart tests.') for partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t", "in result: partition['smart'] = 'Passed' else: partition['smart'] = 'Failed' handler.add({ 'partitions': partitions })", "from time import sleep import 
src.wrappers as wrappers from src.console import console from", "log.info('Waiting for smart test completion (2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart", "in devices: result = console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged' in result:", "for partition in partitions: if partition['device'] in devices: result = console('smartctl -a {}'.format(partition['device']))", "'No Errors Logged' in result: partition['smart'] = 'Passed' else: partition['smart'] = 'Failed' handler.add({", "partitions = disk_partitions() devices = [] log.info('Starting smart tests.') for partition in partitions:", "partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test", "<filename>src/api/storage/smart.py from time import sleep import src.wrappers as wrappers from src.console import console", "wrappers from src.console import console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector", "from src.console import console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def", "test results.') for partition in partitions: if partition['device'] in devices: result = console('smartctl", "sleep(120) partitions = disk_partitions() log.info('Checking smart test results.') for partition in partitions: if", "result = console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged' in result: partition['smart'] =", "log): partitions = disk_partitions() devices = [] log.info('Starting smart tests.') for partition in", "console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion", "def get(handler, log): partitions = disk_partitions() devices = [] 
log.info('Starting smart tests.') for", "console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion (2 minutes).') sleep(120)", "from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log): partitions =", "-a {}'.format(partition['device'])) if 'No Errors Logged' in result: partition['smart'] = 'Passed' else: partition['smart']", "minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart test results.') for partition in partitions:", "console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged' in result: partition['smart'] = 'Passed' else:", "partition in partitions: if partition['device'] in devices: result = console('smartctl -a {}'.format(partition['device'])) if", "for partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting", "results.') for partition in partitions: if partition['device'] in devices: result = console('smartctl -a", "as wrappers from src.console import console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats", "@wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log): partitions = disk_partitions() devices = [] log.info('Starting", "@wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log): partitions = disk_partitions() devices = []", "tests.') for partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device'])", "(2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart test results.') for partition in", "get(handler, log): partitions = disk_partitions() devices = [] log.info('Starting smart tests.') 
for partition", "time import sleep import src.wrappers as wrappers from src.console import console from .partitions", "partitions: if partition['device'] in devices: result = console('smartctl -a {}'.format(partition['device'])) if 'No Errors", "completion (2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart test results.') for partition", "test completion (2 minutes).') sleep(120) partitions = disk_partitions() log.info('Checking smart test results.') for", "partition['device'] in devices: result = console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged' in", "import disk_partitions @wrappers.jwt @wrappers.endpoint @wrappers.stats @wrappers.injector def get(handler, log): partitions = disk_partitions() devices", "devices = [] log.info('Starting smart tests.') for partition in partitions: console('smartctl -X {}'.format(partition['device']))", "log.info('Starting smart tests.') for partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short", "if partition['device'] in devices: result = console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged'", "partitions = disk_partitions() log.info('Checking smart test results.') for partition in partitions: if partition['device']", "devices: result = console('smartctl -a {}'.format(partition['device'])) if 'No Errors Logged' in result: partition['smart']", "src.wrappers as wrappers from src.console import console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint", "-X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion (2", "{}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for smart test completion (2 minutes).') sleep(120) partitions = disk_partitions()", "console from .partitions import disk_partitions @wrappers.jwt @wrappers.endpoint 
@wrappers.stats @wrappers.injector def get(handler, log): partitions", "smart test results.') for partition in partitions: if partition['device'] in devices: result =", "in partitions: if partition['device'] in devices: result = console('smartctl -a {}'.format(partition['device'])) if 'No", "partition in partitions: console('smartctl -X {}'.format(partition['device'])) console('smartctl -t short {}'.format(partition['device'])) devices.append(partition['device']) log.info('Waiting for", "disk_partitions() devices = [] log.info('Starting smart tests.') for partition in partitions: console('smartctl -X" ]
[ "= requests.get(url) f = open(page + \".txt\", \"a+\") # Parse HTML and save", "try: link = one_a_tag[\"href\"] if page in link: p = patron.match(link) urlink =", "to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page", "loop through all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))):", "pyperclip def main(url, page): # Connect to the URL response = requests.get(url) f", "links one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page in link: p", "\"__main__\": # Set the URL you want to webscrape from url = sys.argv[1]", "+ page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the whole data set, let's", "BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page +", "requests import pyperclip def main(url, page): # Connect to the URL response =", "Parse HTML and save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron =", "= open(page + \".txt\", \"a+\") # Parse HTML and save to BeautifulSoup object", "== \"__main__\": # Set the URL you want to webscrape from url =", "tags are for links one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page", "if page in link: p = patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink", "bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To", "max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a' tags are for links one_a_tag =", "#!/usr/bin/python3 # Import libraries import re import bs4 import sys import time import", "bar1.finish() if __name__ == \"__main__\": # Set the URL you want to webscrape", "page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the 
whole data set, let's do", "To download the whole data set, let's do a for loop through all", "range(len(soup.findAll(\"a\"))): # 'a' tags are for links one_a_tag = soup.findAll(\"a\")[i] try: link =", "do a for loop through all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for", "\"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download", "(KeyError): pass bar1.next() bar1.finish() if __name__ == \"__main__\": # Set the URL you", "pass bar1.next() bar1.finish() if __name__ == \"__main__\": # Set the URL you want", "one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page in link: p =", "# 'a' tags are for links one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"]", "re import bs4 import sys import time import progress.bar import requests import pyperclip", "set, let's do a for loop through all a tags bar1 = progress.bar.Bar(\"Procesando:\",", "+ \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the whole data set, let's do a", "you want to webscrape from url = sys.argv[1] page = sys.argv[2] main(url, page)", "= soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page in link: p = patron.match(link)", "response = requests.get(url) f = open(page + \".txt\", \"a+\") # Parse HTML and", "\"a+\") # Parse HTML and save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\")", "to the URL response = requests.get(url) f = open(page + \".txt\", \"a+\") #", "+ \".txt\", \"a+\") # Parse HTML and save to BeautifulSoup object soup =", "re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the whole data", "all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a'", "+ \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if 
__name__ ==", "import re import bs4 import sys import time import progress.bar import requests import", "link = one_a_tag[\"href\"] if page in link: p = patron.match(link) urlink = p.group(1)", "# Parse HTML and save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron", "link: p = patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink)", "patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the", "let's do a for loop through all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\")))", "data set, let's do a for loop through all a tags bar1 =", "soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" )", "pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if __name__ == \"__main__\": # Set", "p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next()", "# Connect to the URL response = requests.get(url) f = open(page + \".txt\",", "p = patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste()", "main(url, page): # Connect to the URL response = requests.get(url) f = open(page", "sys import time import progress.bar import requests import pyperclip def main(url, page): #", "# Import libraries import re import bs4 import sys import time import progress.bar", "in range(len(soup.findAll(\"a\"))): # 'a' tags are for links one_a_tag = soup.findAll(\"a\")[i] try: link", "import requests import pyperclip def main(url, page): # Connect to the URL response", "import bs4 import sys import time import progress.bar import requests import pyperclip def", "and save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, 
\"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\"", "the URL you want to webscrape from url = sys.argv[1] page = sys.argv[2]", "for links one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page in link:", "time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if __name__ == \"__main__\": # Set the", "r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the whole data set,", "through all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): #", "f = open(page + \".txt\", \"a+\") # Parse HTML and save to BeautifulSoup", "i in range(len(soup.findAll(\"a\"))): # 'a' tags are for links one_a_tag = soup.findAll(\"a\")[i] try:", "\".txt\", \"a+\") # Parse HTML and save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text,", "in link: p = patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink + \"\\n\")", "libraries import re import bs4 import sys import time import progress.bar import requests", "pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if __name__ == \"__main__\": #", "save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" +", "Set the URL you want to webscrape from url = sys.argv[1] page =", "for i in range(len(soup.findAll(\"a\"))): # 'a' tags are for links one_a_tag = soup.findAll(\"a\")[i]", "a for loop through all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i", "progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a' tags are for links one_a_tag", "requests.get(url) f = open(page + \".txt\", \"a+\") # Parse HTML and save to", "open(page + \".txt\", \"a+\") # Parse HTML and save to BeautifulSoup object soup", 
"Import libraries import re import bs4 import sys import time import progress.bar import", "object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\"", "a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a' tags", "bar1.next() bar1.finish() if __name__ == \"__main__\": # Set the URL you want to", "if __name__ == \"__main__\": # Set the URL you want to webscrape from", "= p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass", "time import progress.bar import requests import pyperclip def main(url, page): # Connect to", "\"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if __name__ == \"__main__\":", "except (KeyError): pass bar1.next() bar1.finish() if __name__ == \"__main__\": # Set the URL", "# To download the whole data set, let's do a for loop through", "for loop through all a tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in", "the whole data set, let's do a for loop through all a tags", "are for links one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page in", "import progress.bar import requests import pyperclip def main(url, page): # Connect to the", "= progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a' tags are for links", "= one_a_tag[\"href\"] if page in link: p = patron.match(link) urlink = p.group(1) +", "import time import progress.bar import requests import pyperclip def main(url, page): # Connect", "f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if __name__", "whole data set, let's do a for loop through all a tags bar1", "URL you want to 
webscrape from url = sys.argv[1] page = sys.argv[2] main(url,", "\"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the whole data set, let's do a for", "= patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1)", "page in link: p = patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink +", "bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a' tags are for", "= bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) #", "patron.match(link) urlink = p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except", "import sys import time import progress.bar import requests import pyperclip def main(url, page):", "'a' tags are for links one_a_tag = soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if", "# Set the URL you want to webscrape from url = sys.argv[1] page", "import pyperclip def main(url, page): # Connect to the URL response = requests.get(url)", "__name__ == \"__main__\": # Set the URL you want to webscrape from url", "<reponame>searchsam/link2clip<gh_stars>0 #!/usr/bin/python3 # Import libraries import re import bs4 import sys import time", "p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() bar1.finish() if", "bs4 import sys import time import progress.bar import requests import pyperclip def main(url,", "tags bar1 = progress.bar.Bar(\"Procesando:\", max=len(soup.findAll(\"a\"))) for i in range(len(soup.findAll(\"a\"))): # 'a' tags are", "HTML and save to BeautifulSoup object soup = bs4.BeautifulSoup(response.text, \"html.parser\") patron = re.compile(", "+ p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError): pass bar1.next() 
bar1.finish()", "URL response = requests.get(url) f = open(page + \".txt\", \"a+\") # Parse HTML", "progress.bar import requests import pyperclip def main(url, page): # Connect to the URL", "one_a_tag[\"href\"] if page in link: p = patron.match(link) urlink = p.group(1) + p.group(2)", ") # To download the whole data set, let's do a for loop", "soup.findAll(\"a\")[i] try: link = one_a_tag[\"href\"] if page in link: p = patron.match(link) urlink", "Connect to the URL response = requests.get(url) f = open(page + \".txt\", \"a+\")", "the URL response = requests.get(url) f = open(page + \".txt\", \"a+\") # Parse", "def main(url, page): # Connect to the URL response = requests.get(url) f =", "= re.compile( r\"(http://|https://)[a-z0-9\\-\\.\\/\\?=\\&]*(\" + page + \"[.a-z/?#!A-Z0-9-_]*)\" ) # To download the whole", "urlink = p.group(1) + p.group(2) f.write(urlink + \"\\n\") pyperclip.copy(urlink) pyperclip.paste() time.sleep(1) except (KeyError):", "page): # Connect to the URL response = requests.get(url) f = open(page +", "download the whole data set, let's do a for loop through all a" ]
[ "packaging.utils import Version import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops():", "import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def", "if old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random = rng.random_sample rng.integers =", "rng = OldRNG(1234) rng.random = rng.random_sample rng.integers = rng.randint return rng else: return", "OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random = rng.random_sample rng.integers = rng.randint return rng", "@pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random =", "import Version import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize()", "= OldRNG(1234) rng.random = rng.random_sample rng.integers = rng.randint return rng else: return np.random.default_rng(1234)", "initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234)", "< Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState):", "import numpy as np import pytest from packaging.utils import Version import fast_numpy_loops old_numpy", "from packaging.utils import Version import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def", "old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): 
fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if", "pass rng = OldRNG(1234) rng.random = rng.random_sample rng.integers = rng.randint return rng else:", "fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng():", "Version import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function')", "Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass", "def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random = rng.random_sample", "rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random = rng.random_sample rng.integers", "def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng =", "numpy as np import pytest from packaging.utils import Version import fast_numpy_loops old_numpy =", "class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random = rng.random_sample rng.integers = rng.randint return", "as np import pytest from packaging.utils import Version import fast_numpy_loops old_numpy = Version(np.__version__)", "Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class", "old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random = rng.random_sample rng.integers = rng.randint", "= Version(np.__version__) < Version('1.18') 
@pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy:", "pytest from packaging.utils import Version import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18') @pytest.fixture(scope='session')", "@pytest.fixture(scope='session') def initialize_fast_numpy_loops(): fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng", "np import pytest from packaging.utils import Version import fast_numpy_loops old_numpy = Version(np.__version__) <", "import pytest from packaging.utils import Version import fast_numpy_loops old_numpy = Version(np.__version__) < Version('1.18')", "fast_numpy_loops.initialize() @pytest.fixture(scope='function') def rng(): if old_numpy: class OldRNG(np.random.RandomState): pass rng = OldRNG(1234) rng.random" ]
[ "RQbitVal(Bool('%s.z0' % identifier), Bool('%s.z1' % identifier), Bool('%s.h0' % identifier), Bool('%s.h1' % identifier), Bool('%s.zm0'", "Bool('%s.v0' % identifier), Bool('%s.v1' % identifier)) def RQbits(identifiers: List[str]) -> List[RQbitVal]: \"\"\" Generate", "List[RQbitVal]: \"\"\" Generate many named rqbits. :param identifiers: chose identifiers. :return: List of", "zm1 self.hm0 = hm0 self.hm1 = hm1 self.v0 = v0 self.v1 = v1", "other.h1 or \\ self.zm0 != other.zm0 or self.zm1 != other.zm1 or \\ self.hm0", "Bool('%s.h0' % identifier), Bool('%s.h1' % identifier), Bool('%s.zm0' % identifier), Bool('%s.zm1' % identifier), Bool('%s.hm0'", ":param v0: ((1+1j)/2, (1-1j)/2) :param v1: ((1-1j)/2, (1+1j/2) \"\"\" super().__init__() if not isinstance(z0,", "= v0 self.v1 = v1 def __eq__(self, other: 'RQbitVal'): return self.z0 == other.z0", "== other.zm1 and \\ self.hm0 == other.hm0 and self.hm1 == other.hm1 and \\", "% identifier)) def RQbits(identifiers: List[str]) -> List[RQbitVal]: \"\"\" Generate many named rqbits. 
from typing import List

import numpy as np
from z3 import And, Bool, BoolRef, Or

from quavl.lib.expressions.qbit import QbitVal


class RQbitVal(QbitVal):
    """A qbit over a reduced state space, encoded as one-hot boolean flags.

    Each attribute (z0, z1, h0, ...) marks one of the finitely many allowed
    qbit states; exactly one flag is expected to hold. Flags may be concrete
    Python booleans or symbolic z3 ``BoolRef`` expressions.
    """

    def __init__(self,
                 z0: BoolRef = False, z1: BoolRef = False,
                 h0: BoolRef = False, h1: BoolRef = False,
                 zm0: BoolRef = False, zm1: BoolRef = False,
                 hm0: BoolRef = False, hm1: BoolRef = False,
                 v0: BoolRef = False, v1: BoolRef = False):
        """
        Constructor for a reduced state space qbit.

        :param z0: (1, 0) (Computational basis)
        :param z1: (0, 1)
        :param h0: (1, 1)/sqrt(2) (Hadamard basis)
        :param h1: (1, -1)/sqrt(2)
        :param zm0: (-1, 0)
        :param zm1: (0, -1)
        :param hm0: (-1, -1)/sqrt(2)
        :param hm1: (-1, 1)/sqrt(2)
        :param v0: ((1+1j)/2, (1-1j)/2)
        :param v1: ((1-1j)/2, (1+1j)/2)
        :raises ValueError: if concrete (non-symbolic) flags do not select
            exactly one state.
        """
        super().__init__()
        # One-hot check applies only to concrete instantiations; symbolic
        # (BoolRef) flags are left unconstrained here (see get_constraints).
        if not isinstance(z0, BoolRef) and sum(np.array([z0, z1, h0, h1, zm0, zm1,
                                                         hm0, hm1, v0, v1], dtype=int)) != 1:
            raise ValueError('Exactly one parameter has to be one.')
        self.z0 = z0
        self.z1 = z1
        self.h0 = h0
        self.h1 = h1
        self.zm0 = zm0
        self.zm1 = zm1
        self.hm0 = hm0
        self.hm1 = hm1
        self.v0 = v0
        self.v1 = v1

    def __eq__(self, other: 'RQbitVal'):
        """Attribute-wise comparison of all ten state flags."""
        return self.z0 == other.z0 and self.z1 == other.z1 and \
               self.h0 == other.h0 and self.h1 == other.h1 and \
               self.zm0 == other.zm0 and self.zm1 == other.zm1 and \
               self.hm0 == other.hm0 and self.hm1 == other.hm1 and \
               self.v0 == other.v0 and self.v1 == other.v1

    def __ne__(self, other: 'RQbitVal'):
        """Attribute-wise inequality of all ten state flags.

        NOTE(review): the original spelled this ``__neq__``, which Python's
        ``!=`` operator never invokes; ``__ne__`` is the correct hook.
        """
        return self.z0 != other.z0 or self.z1 != other.z1 or \
               self.h0 != other.h0 or self.h1 != other.h1 or \
               self.zm0 != other.zm0 or self.zm1 != other.zm1 or \
               self.hm0 != other.hm0 or self.hm1 != other.hm1 or \
               self.v0 != other.v0 or self.v1 != other.v1

    # Backward-compatible alias for code that called the old misspelled name.
    __neq__ = __ne__

    def __neg__(self):
        """Return a new RQbitVal with each basis pair's flags swapped (0 <-> 1)."""
        return RQbitVal(z0=self.z1, z1=self.z0,
                        h0=self.h1, h1=self.h0,
                        zm0=self.zm1, zm1=self.zm0,
                        hm0=self.hm1, hm1=self.hm0,
                        v0=self.v1, v1=self.v0)

    def __repr__(self):
        return self.get_identifier()

    def get_constraints(self, computational_basis_only: bool = False):
        """Build the one-hot state constraint for this rqbit.

        :param computational_basis_only: restrict the state space to
            {z0, z1}; otherwise {z0, z1, v0, v1} is used.
        :return: z3 disjunction asserting that exactly one of the selected
            flags is true (flags outside the selected set are not constrained).
        """
        if computational_basis_only:
            attributes = ['z0', 'z1']
        else:
            attributes = ['z0', 'z1', 'v0', 'v1']
            # Full set would be:
            # ['z0', 'z1', 'h0', 'h1', 'zm0', 'zm1', 'hm0', 'hm1', 'v0', 'v1']
        conjunctions = []
        for attribute in attributes:
            # Exactly `attribute` is true, every other selected flag is false.
            conjunction = And([getattr(self, v) == (v == attribute) for v in attributes])
            conjunctions.append(conjunction)
        return Or(conjunctions)

    def get_identifier(self):
        """
        :return: Identifier (the '<name>' prefix of the '<name>.z0' z3 variable).
        """
        return str(self.z0).split('.')[0]


def RQbit(identifier: str) -> RQbitVal:
    """
    Generate a named rqbit.

    :param identifier: chosen identifier.
    :return: RQbit whose ten flags are fresh z3 booleans named '<identifier>.<flag>'.
    """
    return RQbitVal(Bool('%s.z0' % identifier), Bool('%s.z1' % identifier),
                    Bool('%s.h0' % identifier), Bool('%s.h1' % identifier),
                    Bool('%s.zm0' % identifier), Bool('%s.zm1' % identifier),
                    Bool('%s.hm0' % identifier), Bool('%s.hm1' % identifier),
                    Bool('%s.v0' % identifier), Bool('%s.v1' % identifier))


def RQbits(identifiers: List[str]) -> List[RQbitVal]:
    """
    Generate many named rqbits.

    :param identifiers: chosen identifiers.
    :return: List of rqbits, one per identifier.
    """
    return [RQbit(identifier) for identifier in identifiers]
[ "Bunch of country related function to help get extended data. \"\"\" import json", "country codes file and store into variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file)", "\"\"\" for country in countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search):", "to be called via the app (front-end) \"\"\" disable = True if disable", "= \"\" return description def insert_countries(db_func): for country in countries: record = dict()", "cities: record = dict() record['name'] = city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country'])", "country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch descriptions \"\"\"", "json import requests # Load country codes file and store into variable countries_file", "cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\"", "= json.load(countries_file) countries_file.close() # Load country lat and long file and store into", "record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as", "deactivate this as I want this up and running quickly. 
Descriptions will have", "wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts' + '&exintro=' +", "+ '&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json() pages = response['query']['pages'] description =", "country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\" with country name", "lat and long \"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long']", "of country related function to help get extended data. \"\"\" import json import", "in countries: if country['name'].lower() == country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using the", "and long file and store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file)", "\"\"\" \"\"\" It's found that wikipedia's api is too slow that it takes", "= json.load(cities_file) for city in cities: record = dict() record['name'] = city['name'] record['code']", "extended data. 
\"\"\" import json import requests # Load country codes file and", "\"\"\" It's found that wikipedia's api is too slow that it takes a", "'&titles={query}' .format(query=search)) response = wiki_req.json() pages = response['query']['pages'] description = \"\" for value", "'extract' in value: description = value['extract'] else: description = \"\" break else: description", "\"\"\" using country code find lat and long \"\"\" cc_key = cc.lower() lat", "\"\"\" using the country code, find the name \"\"\" for country in countries:", "file and store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def", "\"\"\" with country name find the country code \"\"\" for country in countries:", "via the app (front-end) \"\"\" disable = True if disable is False: wiki_req", "# Load country codes file and store into variable countries_file = open('assets/country-codes.json') countries", "app (front-end) \"\"\" disable = True if disable is False: wiki_req = requests.get(", "description = \"\" return description def insert_countries(db_func): for country in countries: record =", "json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code find lat and long \"\"\"", "if disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' +", "to deactivate this as I want this up and running quickly. Descriptions will", "get_country_code(country_name): \"\"\" with country name find the country code \"\"\" for country in", "as I want this up and running quickly. 
Descriptions will have to be", "lat, lng def get_country_code(country_name): \"\"\" with country name find the country code \"\"\"", "insert_countries(db_func): for country in countries: record = dict() record['name'] = country['name'] record['code'] =", "countries_file.close() # Load country lat and long file and store into variable country_latlng_file", "pages = response['query']['pages'] description = \"\" for value in pages.values(): if 'extract' in", "country['name'].lower() == country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using the country code, find", "countries: record = dict() record['name'] = country['name'] record['code'] = country['code'] record['lat'], record['lng'] =", "open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load country lat and long file and", "= response['query']['pages'] description = \"\" for value in pages.values(): if 'extract' in value:", "and running quickly. 
Descriptions will have to be called via the app (front-end)", "else: description = \"\" break else: description = \"\" return description def insert_countries(db_func):", "lng = country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\" with country name find", "have to be called via the app (front-end) \"\"\" disable = True if", "+ '&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json() pages = response['query']['pages']", "api is too slow that it takes a lot of time to ingest", "True if disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query'", "description def insert_countries(db_func): for country in countries: record = dict() record['name'] = country['name']", "country lat and long file and store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng", "= \"\" for value in pages.values(): if 'extract' in value: description = value['extract']", "if country['name'].lower() == country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using the country code,", "country['code'].upper() def get_country_name(country_code): \"\"\" using the country code, find the name \"\"\" for", "in countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia", "Load country lat and long file and store into variable country_latlng_file = open('assets/countrycode-latlong.json')", "def get_country_code(country_name): \"\"\" with country name find the country code \"\"\" for country", "that it takes a lot of time to ingest the data, for now", "will have to be called via the app (front-end) \"\"\" disable = True", "country['name'] record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = 
get_wikipedia_description(country['name']) db_func(record) def", "this up and running quickly. Descriptions will have to be called via the", "country in countries: record = dict() record['name'] = country['name'] record['code'] = country['code'] record['lat'],", "It's found that wikipedia's api is too slow that it takes a lot", "countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api", "slow that it takes a lot of time to ingest the data, for", "in pages.values(): if 'extract' in value: description = value['extract'] else: description = \"\"", "for value in pages.values(): if 'extract' in value: description = value['extract'] else: description", "data. \"\"\" import json import requests # Load country codes file and store", "'&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json() pages = response['query']['pages'] description", "= get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities = json.load(cities_file) for", "help get extended data. 
\"\"\" import json import requests # Load country codes", "return country['code'].upper() def get_country_name(country_code): \"\"\" using the country code, find the name \"\"\"", "record['name'] = city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng']", "name find the country code \"\"\" for country in countries: if country['name'].lower() ==", "using the country code, find the name \"\"\" for country in countries: if", "country name find the country code \"\"\" for country in countries: if country['name'].lower()", "return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch descriptions \"\"\" \"\"\"", "too slow that it takes a lot of time to ingest the data,", "countries = json.load(countries_file) countries_file.close() # Load country lat and long file and store", "long file and store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close()", "Using wikipedia api to fetch descriptions \"\"\" \"\"\" It's found that wikipedia's api", "now I decided to deactivate this as I want this up and running", "country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch descriptions \"\"\" \"\"\" It's", "if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api to", "if 'extract' in value: description = value['extract'] else: description = \"\" break else:", "country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch", "cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng def", "+ '&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext=' + 
'&titles={query}' .format(query=search)) response =", "import json import requests # Load country codes file and store into variable", "False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts' + '&exintro='", "get_country_name(country_code): \"\"\" using the country code, find the name \"\"\" for country in", "for country in countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\"", "json.load(cities_file) for city in cities: record = dict() record['name'] = city['name'] record['code'] =", "wiki_req.json() pages = response['query']['pages'] description = \"\" for value in pages.values(): if 'extract'", "description = value['extract'] else: description = \"\" break else: description = \"\" return", "record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file:", "long \"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat,", "wikipedia api to fetch descriptions \"\"\" \"\"\" It's found that wikipedia's api is", "\"\"\" disable = True if disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' +", "file and store into variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() #", "'&prop=extracts' + '&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json() pages =", "description = \"\" for value in pages.values(): if 'extract' in value: description =", "json.load(countries_file) countries_file.close() # Load country lat and long file and store into variable", "cities = json.load(cities_file) for city in cities: record = dict() record['name'] = city['name']", 
"open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code find lat", "record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func):", "# Load country lat and long file and store into variable country_latlng_file =", "record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities = json.load(cities_file)", "it takes a lot of time to ingest the data, for now I", "code \"\"\" for country in countries: if country['name'].lower() == country_name.lower(): return country['code'].upper() def", "\"\" return description def insert_countries(db_func): for country in countries: record = dict() record['name']", "= dict() record['name'] = country['name'] record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description']", "wikipedia's api is too slow that it takes a lot of time to", "descriptions \"\"\" \"\"\" It's found that wikipedia's api is too slow that it", "requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext=' +", "\"\"\" for country in countries: if country['name'].lower() == country_name.lower(): return country['code'].upper() def get_country_name(country_code):", "related function to help get extended data. 
\"\"\" import json import requests #", "fetch descriptions \"\"\" \"\"\" It's found that wikipedia's api is too slow that", "description = \"\" break else: description = \"\" return description def insert_countries(db_func): for", "lat and long file and store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng =", "time to ingest the data, for now I decided to deactivate this as", "be called via the app (front-end) \"\"\" disable = True if disable is", "lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\" with", "= city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng'] = city['lng'] record['description'] =", "#!/usr/bin/python3 \"\"\" Bunch of country related function to help get extended data. \"\"\"", "country in countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using", "lot of time to ingest the data, for now I decided to deactivate", "\"\" break else: description = \"\" return description def insert_countries(db_func): for country in", "into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using", "with country name find the country code \"\"\" for country in countries: if", "+ '&titles={query}' .format(query=search)) response = wiki_req.json() pages = response['query']['pages'] description = \"\" for", "= country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with", "response = wiki_req.json() pages = response['query']['pages'] description = \"\" for value in pages.values():", ".format(query=search)) response = wiki_req.json() 
pages = response['query']['pages'] description = \"\" for value in", "def get_country_name(country_code): \"\"\" using the country code, find the name \"\"\" for country", "as cities_file: cities = json.load(cities_file) for city in cities: record = dict() record['name']", "value: description = value['extract'] else: description = \"\" break else: description = \"\"", "\"\"\" import json import requests # Load country codes file and store into", "record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng'] = city['lng'] record['description']", "codes file and store into variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close()", "in countries: record = dict() record['name'] = country['name'] record['code'] = country['code'] record['lat'], record['lng']", "+ '?format=json' + '&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search))", "else: description = \"\" return description def insert_countries(db_func): for country in countries: record", "takes a lot of time to ingest the data, for now I decided", "= True if disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' +", "open('assets/cities.json') as cities_file: cities = json.load(cities_file) for city in cities: record = dict()", "countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load country lat and long", "find lat and long \"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng =", "store into variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load country", "in value: description = value['extract'] else: description = \"\" break else: description =", "\"\"\" Using wikipedia api to fetch descriptions \"\"\" \"\"\" It's found that wikipedia's", 
"want this up and running quickly. Descriptions will have to be called via", "def insert_countries(db_func): for country in countries: record = dict() record['name'] = country['name'] record['code']", "country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using the country code, find the name", "== country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using the country code, find the", "name \"\"\" for country in countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize() def", "of time to ingest the data, for now I decided to deactivate this", "with open('assets/cities.json') as cities_file: cities = json.load(cities_file) for city in cities: record =", "the app (front-end) \"\"\" disable = True if disable is False: wiki_req =", "to help get extended data. \"\"\" import json import requests # Load country", "the name \"\"\" for country in countries: if country['code'].lower() == country_code.lower(): return country['name'].capitalize()", "def get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch descriptions \"\"\" \"\"\" It's found", "import requests # Load country codes file and store into variable countries_file =", "find the name \"\"\" for country in countries: if country['code'].lower() == country_code.lower(): return", "is too slow that it takes a lot of time to ingest the", "find the country code \"\"\" for country in countries: if country['name'].lower() == country_name.lower():", "get extended data. \"\"\" import json import requests # Load country codes file", "= cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name):", "that wikipedia's api is too slow that it takes a lot of time", "Load country codes file and store into variable countries_file = open('assets/country-codes.json') countries =", "I want this up and running quickly. 
Descriptions will have to be called", "get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities =", "db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities = json.load(cities_file) for city in", "for city in cities: record = dict() record['name'] = city['name'] record['code'] = city['country']", "country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code", "countries: if country['name'].lower() == country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using the country", "I decided to deactivate this as I want this up and running quickly.", "called via the app (front-end) \"\"\" disable = True if disable is False:", "country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\" with country name find the country", "the data, for now I decided to deactivate this as I want this", "record = dict() record['name'] = country['name'] record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code'])", "= requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext='", "= json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code find lat and long", "return description def insert_countries(db_func): for country in countries: record = dict() record['name'] =", "(front-end) \"\"\" disable = True if disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php'", "get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities = 
json.load(cities_file) for city", "return lat, lng def get_country_code(country_name): \"\"\" with country name find the country code", "= get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities", "the country code, find the name \"\"\" for country in countries: if country['code'].lower()", "country code find lat and long \"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat']", "record['name'] = country['name'] record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name'])", "= country['name'] record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record)", "record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng'] = city['lng'] record['description'] = get_wikipedia_description(city['name']) db_func(record)", "city in cities: record = dict() record['name'] = city['name'] record['code'] = city['country'] record['country']", "country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] = get_wikipedia_description(country['name']) db_func(record) def insert_cities(db_func): with open('assets/cities.json')", "and long \"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return", "running quickly. 
Descriptions will have to be called via the app (front-end) \"\"\"", "lng def get_country_code(country_name): \"\"\" with country name find the country code \"\"\" for", "'&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json() pages = response['query']['pages'] description = \"\"", "insert_cities(db_func): with open('assets/cities.json') as cities_file: cities = json.load(cities_file) for city in cities: record", "record = dict() record['name'] = city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat']", "city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng'] = city['lng']", "dict() record['name'] = city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat']", "country related function to help get extended data. \"\"\" import json import requests", "country in countries: if country['name'].lower() == country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\" using", "dict() record['name'] = country['name'] record['code'] = country['code'] record['lat'], record['lng'] = get_country_latlng(country['code']) record['description'] =", "and store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc):", "= country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\" with country name find the", "code find lat and long \"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng", "store into variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\"", "for country in countries: if country['name'].lower() == 
country_name.lower(): return country['code'].upper() def get_country_name(country_code): \"\"\"", "= country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng def get_country_code(country_name): \"\"\" with country", "= open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code find", "== country_code.lower(): return country['name'].capitalize() def get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch descriptions", "to fetch descriptions \"\"\" \"\"\" It's found that wikipedia's api is too slow", "ingest the data, for now I decided to deactivate this as I want", "in cities: record = dict() record['name'] = city['name'] record['code'] = city['country'] record['country'] =", "found that wikipedia's api is too slow that it takes a lot of", "disable = True if disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json'", "city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng'] = city['lng'] record['description'] = get_wikipedia_description(city['name'])", "for country in countries: record = dict() record['name'] = country['name'] record['code'] = country['code']", "country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code find lat and long \"\"\" cc_key", "country code \"\"\" for country in countries: if country['name'].lower() == country_name.lower(): return country['code'].upper()", "\"\" for value in pages.values(): if 'extract' in value: description = value['extract'] else:", "get_country_latlng(cc): \"\"\" using country code find lat and long \"\"\" cc_key = cc.lower()", "def insert_cities(db_func): with open('assets/cities.json') as cities_file: cities = json.load(cities_file) for city in cities:", "= wiki_req.json() pages = response['query']['pages'] description = \"\" for value in 
pages.values(): if", "'&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json()", "= city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat'] = city['lat'] record['lng'] =", "+ '&prop=extracts' + '&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search)) response = wiki_req.json() pages", "value in pages.values(): if 'extract' in value: description = value['extract'] else: description =", "api to fetch descriptions \"\"\" \"\"\" It's found that wikipedia's api is too", "up and running quickly. Descriptions will have to be called via the app", "disable is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts'", "= \"\" break else: description = \"\" return description def insert_countries(db_func): for country", "value['extract'] else: description = \"\" break else: description = \"\" return description def", "the country code \"\"\" for country in countries: if country['name'].lower() == country_name.lower(): return", "a lot of time to ingest the data, for now I decided to", "'?format=json' + '&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext=' + '&titles={query}' .format(query=search)) response", "= dict() record['name'] = city['name'] record['code'] = city['country'] record['country'] = get_country_name(city['country']) record['lat'] =", "country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country code find lat and", "\"\"\" cc_key = cc.lower() lat = country_latlng[cc_key]['lat'] lng = country_latlng[cc_key]['long'] return lat, lng", "into variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load country lat", "response['query']['pages'] description = \"\" for value in pages.values(): if 'extract' in 
value: description", "to ingest the data, for now I decided to deactivate this as I", "get_wikipedia_description(search): \"\"\" Using wikipedia api to fetch descriptions \"\"\" \"\"\" It's found that", "def get_country_latlng(cc): \"\"\" using country code find lat and long \"\"\" cc_key =", "function to help get extended data. \"\"\" import json import requests # Load", "Descriptions will have to be called via the app (front-end) \"\"\" disable =", "= value['extract'] else: description = \"\" break else: description = \"\" return description", "data, for now I decided to deactivate this as I want this up", "pages.values(): if 'extract' in value: description = value['extract'] else: description = \"\" break", "requests # Load country codes file and store into variable countries_file = open('assets/country-codes.json')", "for now I decided to deactivate this as I want this up and", "\"\"\" Bunch of country related function to help get extended data. \"\"\" import", "country code, find the name \"\"\" for country in countries: if country['code'].lower() ==", "code, find the name \"\"\" for country in countries: if country['code'].lower() == country_code.lower():", "using country code find lat and long \"\"\" cc_key = cc.lower() lat =", "quickly. 
Descriptions will have to be called via the app (front-end) \"\"\" disable", "is False: wiki_req = requests.get( 'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts' +", "and store into variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load", "variable countries_file = open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load country lat and", "variable country_latlng_file = open('assets/countrycode-latlong.json') country_latlng = json.load(country_latlng_file) country_latlng_file.close() def get_country_latlng(cc): \"\"\" using country", "'https://en.wikipedia.org/w/api.php' + '?format=json' + '&action=query' + '&prop=extracts' + '&exintro=' + '&explaintext=' + '&titles={query}'", "break else: description = \"\" return description def insert_countries(db_func): for country in countries:", "decided to deactivate this as I want this up and running quickly. Descriptions", "this as I want this up and running quickly. Descriptions will have to", "= open('assets/country-codes.json') countries = json.load(countries_file) countries_file.close() # Load country lat and long file", "cities_file: cities = json.load(cities_file) for city in cities: record = dict() record['name'] =" ]
[ "= out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for index, out_tensor", "test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy value for layer_id, which is unused", "ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def", "runtime.LoadNetwork(opt_network) assert \"\" == messages # Load test image data stored in input_lite.npy", "def test_check_deserializer_swig_ownership(parser): # Check to see that SWIG has ownership for parser. This", "messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages =", "out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for index,", "with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part of the", "def test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy value for layer_id, which is", "messages = runtime.LoadNetwork(opt_network) assert \"\" == messages # Load test image data stored", "© 2020 Arm Ltd and Contributors. All rights reserved. 
# SPDX-License-Identifier: MIT import", "test network to be used for the tests below \"\"\" parser = ann.IDeserializer()", "and setup the test network to be used for the tests below \"\"\"", "import os import pytest import pyarmnn as ann import numpy as np @pytest.fixture()", "Only check for part of the exception since the exception returns # absolute", "implementation layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check", "ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see that SWIG has ownership", "file for result comparison. expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches", "== 2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() ==", "test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy value for layer_id, which is unused", "returns # absolute path which will change on different machines. 
assert 'Cannot read", "actual implementation layer_id = 0 input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info =", "unused in the actual implementation layer_id = 0 input_name = 'input_1' output_name =", "unused in the actual implementation layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1 =", "len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" == messages # Load test image", "'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info", "= ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part", "in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = []", "tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements()", "parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1]", "= 0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor", "== 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as a", "parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)", "which will change on different machines. 
assert 'Cannot read the file' in str(err.value)", "the actual implementation layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name)", "== 2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() ==", "on different machines. assert 'Cannot read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser", "def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__", "test image data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info],", "def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as", "rights reserved. 
# SPDX-License-Identifier: MIT import os import pytest import pyarmnn as ann", "128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy", "np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches golden output assert (expected_outputs == output_vectors[0]).all()", "is unused in the actual implementation layer_id = 0 input_name = 'input_1' input_binding_info", "GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2 assert", "parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig python", "= ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages)", "expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches golden output assert (expected_outputs", "image data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])", "np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name)", "layer_id, which is unused in the actual implementation layer_id = 0 output_name =", "== 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy value for layer_id,", "ownership for parser. 
This instructs SWIG to take # ownership of the return", "# Load golden output file for result comparison. expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) #", "test_check_deserializer_swig_ownership(parser): # Check to see that SWIG has ownership for parser. This instructs", "out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for", "is a swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): #", "golden output file for result comparison. expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that", "data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors", "garbage-collected when it is no longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): #", "= np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id,", "# Only check for part of the exception since the exception returns #", "use 0 as a dummy value for layer_id, which is unused in the", "see that SWIG has ownership for parser. 
This instructs SWIG to take #", "in the actual implementation layer_id = 0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id,", "parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is", "output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10", "assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see that SWIG has", "784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use", "ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert", "ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" == messages", "assert 0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" == messages #", "tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert", "opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages", "exception since the exception returns # absolute path which will change on different", "tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale()", "no longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as a", 
"tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder):", "info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions()", "Contributors. All rights reserved. # SPDX-License-Identifier: MIT import os import pytest import pyarmnn", "SWIG has ownership for parser. This instructs SWIG to take # ownership of", "== 4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() ==", "as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup the test network to", "it is no longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0", "out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for index, out_tensor in", "Check the tensor info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() ==", "in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use", "absolute path which will change on different machines. assert 'Cannot read the file'", "output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info retrieved from GetNetworkOutputBindingInfo tensor1", "actual implementation layer_id = 0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor", "# ownership of the return value. 
This allows the value to be automatically", "from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2", "[] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info)))", "= ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a", "def parser(shared_data_folder): \"\"\" Parse and setup the test network to be used for", "output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options =", "\"\"\" Parse and setup the test network to be used for the tests", "# Copyright © 2020 Arm Ltd and Contributors. All rights reserved. # SPDX-License-Identifier:", "and Contributors. All rights reserved. 
# SPDX-License-Identifier: MIT import os import pytest import", "which is unused in the actual implementation layer_id = 0 output_name = \"dense/Softmax\"", "np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup the test network to be", "be used for the tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield", "used for the tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser", "import numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup the test", "str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0", "assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625 def", "is no longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as", "parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a dummy", "@pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup the test network to be used", "check for part of the exception since the exception returns # absolute path", "part of the exception since the exception returns # absolute path which will", "input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType() ==", "for part of the exception since the exception returns # absolute path which", "preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\"", "output_name) out_tensor_info = 
out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors", "since the exception returns # absolute path which will change on different machines.", "assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy value for layer_id,", "test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check", "parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for", "ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id", "which is unused in the actual implementation layer_id = 0 input_name = 'input_1'", "= np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches golden output assert (expected_outputs ==", "tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with", "tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale()", "\"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There", "test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor defined\" assert 
ann.IDeserializer.__swig_destroy__.__name__ ==", "ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part of", "output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for index, out_tensor in enumerate(output_tensors):", "layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the", "def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only", "assert \"\" == messages # Load test image data stored in input_lite.npy input_tensor_data", "net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" == messages # Load test image data", "input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime =", "output_tensors) output_vectors = [] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden", "yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor defined\"", "= output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() ==", "<gh_stars>100-1000 # Copyright © 2020 Arm Ltd and Contributors. All rights reserved. 
#", "= [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network,", "= 'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')]", "of the exception since the exception returns # absolute path which will change", "in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy value", "# garbage-collected when it is no longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser):", "ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id,", "MIT import os import pytest import pyarmnn as ann import numpy as np", "Load golden output file for result comparison. expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check", "== 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn'))", "\"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see that SWIG has ownership for parser.", "for result comparison. 
expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches golden", "tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as", "tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset()", "= ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1]", "reserved. # SPDX-License-Identifier: MIT import os import pytest import pyarmnn as ann import", "longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy", "parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part of the exception since the exception", "output file for result comparison. 
expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output", "when it is no longer in use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use", "os import pytest import pyarmnn as ann import numpy as np @pytest.fixture() def", "a dummy value for layer_id, which is unused in the actual implementation layer_id", "python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see", "parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy value for layer_id, which", "\"\" == messages # Load test image data stored in input_lite.npy input_tensor_data =", "import pytest import pyarmnn as ann import numpy as np @pytest.fixture() def parser(shared_data_folder):", "value to be automatically # garbage-collected when it is no longer in use", "parser. This instructs SWIG to take # ownership of the return value. This", "= 0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert", "file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) #", "the exception since the exception returns # absolute path which will change on", "test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a", "input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages", "instructs SWIG to take # ownership of the return value. 
This allows the", "parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options) opt_network,", "as ann import numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup", "to be automatically # garbage-collected when it is no longer in use assert", "in the actual implementation layer_id = 0 input_name = 'input_1' output_name = 'dense/Softmax'", "assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def", "assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0", "different machines. assert 'Cannot read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser =", "dummy value for layer_id, which is unused in the actual implementation layer_id =", "exception returns # absolute path which will change on different machines. assert 'Cannot", "= ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network)", "layer_id = 0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1]", "path which will change on different machines. assert 'Cannot read the file' in", "the tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy():", "Ltd and Contributors. All rights reserved. 
# SPDX-License-Identifier: MIT import os import pytest", "= \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info retrieved from", "== 2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() ==", "tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder,", "unused in the actual implementation layer_id = 0 input_name = 'input_1' input_binding_info =", "input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info =", "numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup the test network", "Arm Ltd and Contributors. All rights reserved. 
# SPDX-License-Identifier: MIT import os import", "== len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" == messages # Load test", "Parse and setup the test network to be used for the tests below", "def test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy value for layer_id, which is", "assert 'Cannot read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network", "\"There is a swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser):", "0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" == messages # Load", "out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors =", "input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'),", "'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType() == 2 assert", "assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err:", "== \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see that SWIG has ownership for", "import pyarmnn as ann import numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse", "result comparison. 
expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches golden output", "'some_unknown_network.armnn')) # Only check for part of the exception since the exception returns", "output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0]", "ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a dummy value for", "is unused in the actual implementation layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1", "= [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id,", "has ownership for parser. This instructs SWIG to take # ownership of the", "All rights reserved. # SPDX-License-Identifier: MIT import os import pytest import pyarmnn as", "Load test image data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors =", "layer_id, which is unused in the actual implementation layer_id = 0 input_name =", "input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784", "a swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check", "comparison. expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy')) # Check that output matches golden output assert", "err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part of the exception since the", "return value. 
This allows the value to be automatically # garbage-collected when it", "assert ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\"", "tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy value for", "0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy value for layer_id, which", "that SWIG has ownership for parser. This instructs SWIG to take # ownership", "[input_tensor_data]) output_tensors = [] out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id =", "allows the value to be automatically # garbage-collected when it is no longer", "output_vectors = [] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output", "input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions()", "# SPDX-License-Identifier: MIT import os import pytest import pyarmnn as ann import numpy", "runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 == len(messages) net_id, messages = runtime.LoadNetwork(opt_network) assert \"\" ==", "messages # Load test image data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy'))", "input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info", "below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert 
ann.IDeserializer.__swig_destroy__,", "network to be used for the tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder,", "'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime", "input_tensors, output_tensors) output_vectors = [] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load", "output_name) # Check the tensor info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert", "the return value. This allows the value to be automatically # garbage-collected when", "swig python destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to", "\"mock_model.armnn\")) # use 0 as a dummy value for layer_id, which is unused", "ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig", "runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0 ==", "the value to be automatically # garbage-collected when it is no longer in", "0 as a dummy value for layer_id, which is unused in the actual", "stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors =", "2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0", "is unused in the actual implementation layer_id = 0 input_name = 'input_1' output_name", 
"runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) #", "Check to see that SWIG has ownership for parser. This instructs SWIG to", "parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4", "== 0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError)", "the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\"))", "SPDX-License-Identifier: MIT import os import pytest import pyarmnn as ann import numpy as", "0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info", "assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128 assert", "assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784 assert", "for layer_id, which is unused in the actual implementation layer_id = 0 output_name", "= [] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file", "assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): # use 0 as a dummy value", "output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for result comparison. 
expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy'))", "# use 0 as a dummy value for layer_id, which is unused in", "parser(shared_data_folder): \"\"\" Parse and setup the test network to be used for the", "2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625", "change on different machines. assert 'Cannot read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder):", "index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for result comparison.", "implementation layer_id = 0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor =", "This allows the value to be automatically # garbage-collected when it is no", "setup the test network to be used for the tests below \"\"\" parser", "4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125", "assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10 assert", "0 input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends =", "== 784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser): #", "use assert parser.thisown def test_deserializer_get_network_input_binding_info(parser): # use 0 as a dummy value for", "to be used for the tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn'))", "to see that SWIG has ownership for parser. 
This instructs SWIG to take", "tensor = input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements()", "take # ownership of the return value. This allows the value to be", "= runtime.LoadNetwork(opt_network) assert \"\" == messages # Load test image data stored in", "the test network to be used for the tests below \"\"\" parser =", "= ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert", "= 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType() == 2", "ann import numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and setup the", "for the tests below \"\"\" parser = ann.IDeserializer() parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn')) yield parser def", "the tensor info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2", "in the actual implementation layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id,", "as a dummy value for layer_id, which is unused in the actual implementation", "= parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() ==", "parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor defined\" assert", "0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as", "parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a dummy value for layer_id, which is", "options = 
ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())", "ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions()) assert 0", "# Check the tensor info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType()", "= parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info retrieved from GetNetworkOutputBindingInfo tensor1 =", "for layer_id, which is unused in the actual implementation layer_id = 0 input_name", "== 10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser", "0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) tensor = input_binding_info[1] assert tensor.GetDataType()", "0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer() with pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) #", "to take # ownership of the return value. This allows the value to", "2020 Arm Ltd and Contributors. All rights reserved. # SPDX-License-Identifier: MIT import os", "layer_id = 0 input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)", "in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for result comparison. 
expected_outputs =", "\"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info retrieved from GetNetworkOutputBindingInfo", "tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128 assert tensor.GetQuantizationScale() == 0.007843137718737125 def test_deserializer_get_network_output_binding_info(parser):", "value for layer_id, which is unused in the actual implementation layer_id = 0", "retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2 assert tensor1.GetNumDimensions() ==", "implementation layer_id = 0 input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id,", "= 0 input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends", "10 assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser =", "= 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions()", "destructor defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see that", "= parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors,", "ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = [] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area())", "network = 
parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a dummy value for layer_id,", "the exception returns # absolute path which will change on different machines. assert", "= ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a dummy value", "actual implementation layer_id = 0 output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) #", "ownership of the return value. This allows the value to be automatically #", "assert tensor1.GetNumDimensions() == 2 assert tensor1.GetNumElements() == 10 assert tensor1.GetQuantizationOffset() == 0 assert", "# Load test image data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors", "= parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, \"mock_model.armnn\")) # use 0 as a dummy value for layer_id, which", "output_name = \"dense/Softmax\" output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name) # Check the tensor info retrieved", "tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset()", "Copyright © 2020 Arm Ltd and Contributors. All rights reserved. # SPDX-License-Identifier: MIT", "= out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id, input_tensors, output_tensors) output_vectors = []", "assert tensor1.GetQuantizationOffset() == 0 assert tensor1.GetQuantizationScale() == 0.00390625 def test_deserializer_filenotfound_exception(shared_data_folder): parser = ann.IDeserializer()", "out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for result comparison. 
expected_outputs", "[ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends,", "out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name) out_tensor_info = out_bind_info[1] out_tensor_id = out_bind_info[0] output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info))) runtime.EnqueueWorkload(net_id,", "SWIG to take # ownership of the return value. This allows the value", "machines. assert 'Cannot read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer()", "be automatically # garbage-collected when it is no longer in use assert parser.thisown", "will change on different machines. assert 'Cannot read the file' in str(err.value) def", "pytest.raises(RuntimeError) as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part of the exception", "'Cannot read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network =", "= parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options)", "for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for result", "preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages =", "# absolute path which will change on different machines. 
assert 'Cannot read the", "automatically # garbage-collected when it is no longer in use assert parser.thisown def", "'input_1' output_name = 'dense/Softmax' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name) preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')] options", "[] for index, out_tensor in enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for", "2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() == 784 assert tensor.GetQuantizationOffset() == 128", "This instructs SWIG to take # ownership of the return value. This allows", "ann.BackendId('CpuRef')] options = ann.CreationOptions() runtime = ann.IRuntime(options) opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(),", "read the file' in str(err.value) def test_deserializer_end_to_end(shared_data_folder): parser = ann.IDeserializer() network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder,", "the actual implementation layer_id = 0 input_name = 'input_1' output_name = 'dense/Softmax' input_binding_info", "tensor info retrieved from GetNetworkOutputBindingInfo tensor1 = output_binding_info1[1] assert tensor1.GetDataType() == 2 assert", "# Check to see that SWIG has ownership for parser. This instructs SWIG", "pyarmnn as ann import numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\" Parse and", "'mock_model.armnn')) yield parser def test_deserializer_swig_destroy(): assert ann.IDeserializer.__swig_destroy__, \"There is a swig python destructor", "pytest import pyarmnn as ann import numpy as np @pytest.fixture() def parser(shared_data_folder): \"\"\"", "defined\" assert ann.IDeserializer.__swig_destroy__.__name__ == \"delete_IDeserializer\" def test_check_deserializer_swig_ownership(parser): # Check to see that SWIG", "of the return value. This allows the value to be automatically # garbage-collected", "value. 
This allows the value to be automatically # garbage-collected when it is", "= input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4 assert tensor.GetNumElements() ==", "as err: parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn')) # Only check for part of the exception since", "input_name) tensor = input_binding_info[1] assert tensor.GetDataType() == 2 assert tensor.GetNumDimensions() == 4 assert", "== messages # Load test image data stored in input_lite.npy input_tensor_data = np.load(os.path.join(shared_data_folder,", "enumerate(output_tensors): output_vectors.append(out_tensor[1].get_memory_area()) # Load golden output file for result comparison. expected_outputs = np.load(os.path.join(shared_data_folder,", "input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy')) input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data]) output_tensors = [] out_bind_info =", "the actual implementation layer_id = 0 input_name = 'input_1' input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)", "for parser. This instructs SWIG to take # ownership of the return value." ]
[ "html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if", "\"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while", "import threading thread_data = threading.local() import time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d", "\"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker,", "**kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x", "threading.local() import time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time", "t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\"", "url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, result=False, **kwargs) return", "from EasyLogin import EasyLogin, mymd5 from mpms import MPMS import threading thread_data =", "def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000,", "a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i", "result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in args])) def worker(item):", "MPMS(worker, handler, 2, 2, meta=meta) m.start() for t in 
get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"},", "handler, 2, 2, meta=meta) m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]:", "t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while len(m)>10: myprint(\"Remaning \"+str(len(m))) time.sleep(2)", "o=True, result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in args])) def", "def worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\")", "html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item):", "\"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2, meta=meta) m.start()", "= EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid)", "lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params):", "from mpms import MPMS import threading thread_data = threading.local() import time myprint =", "params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, result=False, **kwargs) return x.json()", "meta=meta) m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"],", "{\"size\":10000, \"page\":0, \"lang\":\"cn\"}, 
cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"],", "(k,v) in sorted(params.items())), o=True, result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i", "uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in", "item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__ == \"__main__\":", "not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid", "for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params,", "[i[0]+str(i[1]) for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url,", "a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())),", "EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return", "{\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler,", "t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while len(m)>10: myprint(\"Remaning \"+str(len(m))) time.sleep(2) m.join()", "a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = 
html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for", "x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, result=False, **kwargs) return x.json() def", "args])) def worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if not a: a =", "t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while len(m)>10: myprint(\"Remaning \"+str(len(m)))", "s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params, **kwargs): global a t", "for i in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0,", "def get(url, params, **kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t,", "thread_data = threading.local() import time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s))", "myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp,", "\"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, result=False,", "o = [i[0]+str(i[1]) for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s)", "t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while len(m)>10: myprint(\"Remaning \"+str(len(m))) time.sleep(2) m.join() myprint(\"Done!\")", "if not a: a = EasyLogin(cachedir=\"cache\") 
thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False)", "2, meta=meta) m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"],", "__name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta = {\"fp\":", "= \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t", "a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i)", "= thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html =", "== \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")}", "t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params, **kwargs): global a t = int(time.time())*1000", "item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta", "x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in args])) def worker(item): global thread_data a", "sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in sorted(params.items())]", "a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta,", "params, **kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)})", 
"return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__ ==", "= html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\")", "item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__", "= a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i)", "= threading.local() import time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import", "meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12,", "EasyLogin, mymd5 from mpms import MPMS import threading thread_data = threading.local() import time", "worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"]", "sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params, **kwargs): global a", "i in args])) def worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if not a:", "m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"],", "meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2, meta=meta) m.start() for", "\"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m", "in 
sorted(params.items())), o=True, result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in", "for i in args])) def worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if not", "\"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2, meta=meta)", "\"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]])", "a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0]", "= {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2, meta=meta) m.start() for t", "int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in", "2, 2, meta=meta) m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"],", "a = thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html", "in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"}))", "{\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2, meta=meta) m.start() for t in", "import MPMS import threading thread_data = threading.local() import time myprint = lambda s:", "#print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\": \"cn\"})) meta = 
{\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m =", "s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o =", "mymd5 from mpms import MPMS import threading thread_data = threading.local() import time myprint", "{s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\"", "return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in args])) def worker(item): global thread_data", "in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"],", "params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in sorted(params.items())] s =", "global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x =", "get(url, params, **kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url,", "t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"],", "MPMS import threading thread_data = threading.local() import time myprint = lambda s: print(\"[{showtime}]", "thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3],", "= a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, 
result=False, **kwargs) return x.json() def tprint(*args):", "thread_data a = thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a", "sorted(params.items())), o=True, result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in args]))", "EasyLogin import EasyLogin, mymd5 from mpms import MPMS import threading thread_data = threading.local()", "return mymd5(s) def get(url, params, **kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t),", "\"timestamp\":str(t), \"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True,", "item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\":", "result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i", "for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"])", "a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid =", "i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params, **kwargs):", "s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t", "handler(meta, item): meta[\"fp\"].write(\"\\t\".join([str(i) for i in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, 
\"/api/front/psons/search\",", "#tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while len(m)>10: myprint(\"Remaning", "time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def", "url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in sorted(params.items())] s", "import time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\")", "cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]]) while len(m)>10:", "= MPMS(worker, handler, 2, 2, meta=meta) m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0,", "m = MPMS(worker, handler, 2, 2, meta=meta) m.start() for t in get(\"/api/front/psons/search\", {\"size\":10000,", "thread_data.__dict__[\"a\"] = a html = a.get(\"https://person.zju.edu.cn/\"+item[3], result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item", "def tprint(*args): print(\"\\t\".join([str(i) for i in args])) def worker(item): global thread_data a =", "sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v) in sorted(params.items())), o=True, result=False, **kwargs)", "= int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for (k,v)", 
"open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2, meta=meta) m.start() for t in get(\"/api/front/psons/search\",", "in args])) def worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if not a: a", "i in item])+\"\\n\") if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, \"lang\":", "time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for", "for (k,v) in sorted(params.items())), o=True, result=False, **kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for", "print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t =", "= lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url,", "print(\"\\t\".join([str(i) for i in args])) def worker(item): global thread_data a = thread_data.__dict__.get(\"a\") if", "%H:%M:%S\"), s=s)) import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o", "= t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params, **kwargs): global a t =", "mymd5(s) def get(url, params, **kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\":", "\"page\":0, \"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2, 2,", "a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v)", "= a html = a.get(\"https://person.zju.edu.cn/\"+item[3], 
result=False) uid = html.split(\"getQRcode.php?uid=\",2)[1].split(\"&\",2)[0] item.append(uid) return item def", "\"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return", "import EasyLogin, mymd5 from mpms import MPMS import threading thread_data = threading.local() import", "**kwargs) return x.json() def tprint(*args): print(\"\\t\".join([str(i) for i in args])) def worker(item): global", "<reponame>zjuchenyuan/EasyLogin<filename>examples/personzju/run.py from EasyLogin import EasyLogin, mymd5 from mpms import MPMS import threading thread_data", "\"+t return mymd5(s) def get(url, params, **kwargs): global a t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\",", "import time a=EasyLogin(cachedir=\"cache\") def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1])", "def sign(timestamp, url, params): t = \"1f11192bd9d14a09b29fc59d556e24e3\" o = [i[0]+str(i[1]) for i in", "threading thread_data = threading.local() import time myprint = lambda s: print(\"[{showtime}] {s}\".format(showtime=time.strftime(\"%Y-%m-%d %H:%M:%S\"),", "get(\"/api/front/psons/search\", {\"size\":10000, \"page\":0, \"lang\":\"cn\"}, cache=True)[\"data\"][\"content\"]: #tprint(t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"], t[\"mapping_name\"], t[\"access_count\"]) m.put([t[\"cn_name\"], t[\"college_name\"], t[\"work_title\"],", "12, \"page\":0, \"lang\": \"cn\"})) meta = {\"fp\": open(\"personzju.txt\",\"w\",encoding=\"utf-8\")} m = MPMS(worker, handler, 2,", "= [i[0]+str(i[1]) for i in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def", "in sorted(params.items())] s = t+url+\"\".join(o)+str(timestamp)+\" \"+t return mymd5(s) def get(url, params, **kwargs): global", "if __name__ == \"__main__\": #print(sign(1579490640000, \"/api/front/psons/search\", {\"size\": 12, \"page\":0, 
\"lang\": \"cn\"})) meta =", "mpms import MPMS import threading thread_data = threading.local() import time myprint = lambda", "global thread_data a = thread_data.__dict__.get(\"a\") if not a: a = EasyLogin(cachedir=\"cache\") thread_data.__dict__[\"a\"] =", "tprint(*args): print(\"\\t\".join([str(i) for i in args])) def worker(item): global thread_data a = thread_data.__dict__.get(\"a\")", "t = int(time.time())*1000 a.s.headers.update({\"appKey\":\"50634610756a4c0e82d5a13bb692e257\", \"timestamp\":str(t), \"sign\": sign(t, url, params)}) x = a.get(\"https://person.zju.edu.cn/server\"+url+\"?\"+\"&\".join(k+\"=\"+str(v) for" ]
[ "= \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while", "sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb')", "am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep", "-blocks -locations | grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result)", "= open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string", "re class ProcessData: messageType=\"\" fileName = \"\" data = \"\" dataNodeAddressList=[] serv =", "import re class ProcessData: messageType=\"\" fileName = \"\" data = \"\" dataNodeAddressList=[] serv", "data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file", "= pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file =", "+\" -files -blocks -locations | grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1", "/root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData();", "/mapreduce/\"+data_variable.fileName+ \" 
/root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name)", "= conn.recv(4096) if not packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data +=", "data=b\"\" while True: packet = conn.recv(4096) if not packet: break if packet[-4:]==\"FINI\".encode(): data", "break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet #print(packet) except: print(\"recv error\")", "packet #print(packet) except: print(\"recv error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs", "pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \"", "#serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = ''", "True: packet = conn.recv(4096) if not packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break", "conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations |", "= re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string = pickle.dumps(sendData) conn.send(data_string) conn.close() print ('client", "sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string =", "COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am 
SERVER\\n\".encode()) elif", "dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck", "= open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs", "-copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb')", "/mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks", "conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try: data=b\"\" while True:", "serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr", "packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet #print(packet) except: print(\"recv error\") print(data) data_variable", "socket import pickle import os import re class ProcessData: messageType=\"\" fileName = \"\"", "data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\")", "= open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode())", "addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) 
from_client = '' try: data=b\"\" while True: packet", "os import re class ProcessData: messageType=\"\" fileName = \"\" data = \"\" dataNodeAddressList=[]", "dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\",", "True: conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try: data=b\"\" while", "\" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files", "open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal", "command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep 'Data' | sed 's/^.*:", "fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs", "packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet #print(packet) except: print(\"recv", "dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn,", "elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND", "import socket import pickle import os import re class ProcessData: messageType=\"\" fileName =", "class ProcessData: messageType=\"\" 
fileName = \"\" data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET,", "messageType=\"\" fileName = \"\" data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103',", "fileName = \"\" data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347))", "print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command)", "/root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\"", "/mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read()", "if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb')", "data += packet #print(packet) except: print(\"recv error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4:", "+= packet #print(packet) except: print(\"recv error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs", "\"\" data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0)", "grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData();", "file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') 
baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file)", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr =", "fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep 'Data' | sed 's/^.*: //'e\"", "= \"\" data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5)", "file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1)", "data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file", "if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet #print(packet) except: print(\"recv error\") print(data)", "'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData)", "ProcessData: messageType=\"\" fileName = \"\" data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep 'Data' | sed", "#serv.settimeout(1.0) from_client = '' try: data=b\"\" while True: packet = conn.recv(4096) if not", "command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file =", 
"SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep 'Data'", "data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file)", "print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am", "conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data)", "CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3:", "pickle import os import re class ProcessData: messageType=\"\" fileName = \"\" data =", "result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string = pickle.dumps(sendData) conn.send(data_string) conn.close()", "error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\"", "while True: conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try: data=b\"\"", "| sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string", "r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) 
print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string = pickle.dumps(sendData) conn.send(data_string) conn.close() print", "serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try: data=b\"\" while True: packet = conn.recv(4096)", "data = \"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0)", "\"\" dataNodeAddressList=[] serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True:", "os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName", "print(\"recv error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \"", "os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations", "#file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string)", "baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file", "serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client =", "not packet: break if 
packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet #print(packet) except:", "#print(packet) except: print(\"recv error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal", "re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string = pickle.dumps(sendData) conn.send(data_string) conn.close() print ('client disconnected')", "elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName +\" -files -blocks -locations | grep 'Data' |", "| grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData=", "-files -blocks -locations | grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1 =", "sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file =", "data +=packet[:-4] break data += packet #print(packet) except: print(\"recv error\") print(data) data_variable =", "socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr = serv.accept() #serv.setblocking(0)", "print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I", "import pickle import os import re class ProcessData: messageType=\"\" fileName = \"\" data", "open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) 
data_string =", "= pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close()", "#serv.setblocking(0) while True: conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try:", "-locations | grep 'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1)", "ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName,", "<reponame>Tianyi6679/mincemeatpy import socket import pickle import os import re class ProcessData: messageType=\"\" fileName", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr = serv.accept()", "pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+ \" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName,", "open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif", "\" /root/hdfsTemp2/\" os.system(command) file = open(\"/root/hdfsTemp2/\"+data_variable.fileName, 'rb') #file = open(\"/root/hdfsTemp2/fileName453\", 'rb') baseFileName=os.path.basename(file.name) sendData=", "serv.bind(('192.168.56.103', 12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while True: conn, addr = serv.accept() #serv.setblocking(0) 
#serv.settimeout(1.0)", "from_client = '' try: data=b\"\" while True: packet = conn.recv(4096) if not packet:", "break data += packet #print(packet) except: print(\"recv error\") print(data) data_variable = pickle.loads(data) if", "print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode())", "-copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\" os.system(fileName1) conn.send(\"I am SERVER\\n\".encode()) elif data_variable.messageType==3: command=\"$HADOOP_HOME/bin/hdfs fsck /mapreduce/\"+data_variable.fileName", "#serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try: data=b\"\" while True: packet = conn.recv(4096) if", "'Data' | sed 's/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1", "'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+", "//'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string = pickle.dumps(sendData) conn.send(data_string)", "while True: packet = conn.recv(4096) if not packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4]", "'' try: data=b\"\" while True: packet = conn.recv(4096) if not packet: break if", "'rb') baseFileName=os.path.basename(file.name) sendData= ProcessData(); sendData.fileName=baseFileName sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2:", "12347)) serv.listen(5) #serv.settimeout(0.0) #serv.setblocking(0) while 
True: conn, addr = serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client", "if not packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet #print(packet)", "conn.recv(4096) if not packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data += packet", "'s/^.*: //'e\" result=os.popen(command).read() r1 = re.findall(r\"\\b(?:\\d{1,3}\\.){3}\\d{1,3}\\b\",result) print(r1) sendData= ProcessData(); sendData.dataNodeAddressList=r1 data_string = pickle.dumps(sendData)", "except: print(\"recv error\") print(data) data_variable = pickle.loads(data) if data_variable.messageType==4: command=\"$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/\"+data_variable.fileName+", "import os import re class ProcessData: messageType=\"\" fileName = \"\" data = \"\"", "= serv.accept() #serv.setblocking(0) #serv.settimeout(1.0) from_client = '' try: data=b\"\" while True: packet =", "file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/\"+data_variable.fileName+ \" /mapreduce/\"", "pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName)", "try: data=b\"\" while True: packet = conn.recv(4096) if not packet: break if packet[-4:]==\"FINI\".encode():", "= '' try: data=b\"\" while True: packet = conn.recv(4096) if not packet: break", "+=packet[:-4] break data += packet #print(packet) except: print(\"recv error\") print(data) data_variable = pickle.loads(data)", "file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS COMMAND CALLED\") fileName1=\"$HADOOP_HOME/bin/hdfs", "packet = conn.recv(4096) 
if not packet: break if packet[-4:]==\"FINI\".encode(): data +=packet[:-4] break data", "sendData.data=pickle.load(file) data_string = pickle.dumps(sendData) conn.send(data_string) conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data,", "conn.send(\"FINI\".encode()) elif data_variable.messageType==2: file = open(\"/root/hdfsTemp/\"+data_variable.fileName, 'wb') pickle.dump(data_variable.data, file) file.close() print(data_variable.fileName) print(data_variable.data) print(\"OS" ]
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1)", "-target*(input1 - input2) + self.margin) if self.reduction == 'mean': output = np.mean(output) elif", "ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output = np.maximum(0, -target*(input1 - input2) +", "def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin = margin self.sum", "self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output = np.maximum(0, -target*(input1 -", "dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an))", "dist + dist.T dist = self.addmm(dist, 1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist,", "dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1)", "this file except in compliance with the License. # You may obtain a", "xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask,", "+ out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\" def", "= self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist,", "self.pow = ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast() self.select = ops.Select() self.reducemax", "Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0", "ANY KIND, either express or implied. 
# See the License for the specific", "self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand", "ops.matmul(mat1, mat2) return mat * alpha + out * beta class TripletLoss(nn.Cell): \"\"\"Triplet", "self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss", "construct(self, mat, alpha, beta, mat1, mat2): out = ops.matmul(mat1, mat2) return mat *", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "import mindspore.numpy as np import mindspore.common.dtype as mstype from mindspore import Tensor class", "np.mean(output) elif self.reduction == 'sum': output = self.sum(output, 0) return output class addmm(nn.Cell):", "self.reduction == 'sum': output = self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\"", "mat, alpha, beta, mat1, mat2): out = ops.matmul(mat1, mat2) return mat * alpha", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "OF ANY KIND, either express or implied. 
# See the License for the", "= self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros) mask_zeros", "* alpha + out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative", "self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an", "return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta, mat1, mat2):", "\"\"\"Triplet loss with hard positive/negative mining\"\"\" import mindspore.nn as nn import mindspore.ops as", "self.addmm = addmm() self.pow = ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast() self.select", "governing permissions and # limitations under the License. # ============================================================================ \"\"\"Triplet loss with", "# limitations under the License. # ============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\"", "mindspore.nn as nn import mindspore.ops as ops import mindspore.numpy as np import mindspore.common.dtype", "dist = self.expand(inputs_) # (32, 32) dist = dist + dist.T dist =", "and # limitations under the License. 
# ============================================================================ \"\"\"Triplet loss with hard positive/negative", "positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow =", "ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast() self.select = ops.Select() self.reducemax = ops.ReduceMax()", "Co., Ltd # # Licensed under the Apache License, Version 2.0 (the \"License\");", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "= MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size,", "self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap =", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "= margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output = np.maximum(0,", "mat2): out = ops.matmul(mat1, mat2) return mat * alpha + out * beta", "mindspore.ops as ops import mindspore.numpy as np import mindspore.common.dtype as mstype from mindspore", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap", "# Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache", "limitations under the License. 
# ============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\" import", "mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow = ops.Pow()", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "mindspore.numpy as np import mindspore.common.dtype as mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell):", "required by applicable law or agreed to in writing, software # distributed under", "(32, 32) dist = dist + dist.T dist = self.addmm(dist, 1, -2, inputs,", "mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask,", "applicable law or agreed to in writing, software # distributed under the License", "TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__()", "self.margin) if self.reduction == 'mean': output = np.mean(output) elif self.reduction == 'sum': output", "32) dist = dist + dist.T dist = self.addmm(dist, 1, -2, inputs, inputs.T)", "\"\"\"Triplet loss with hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm", "super(TripletLoss, self).__init__() self.addmm = addmm() self.pow = ops.Pow() self.equal = ops.Equal() self.cast =", "or agreed to in writing, software # distributed under the License is distributed", "Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction =", "Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2)", "CONDITIONS OF ANY 
KIND, either express or implied. # See the License for", "dist_ap = self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros,", "65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ =", "'mean': output = np.mean(output) elif self.reduction == 'sum': output = self.sum(output, 0) return", "inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask =", "writing, software # distributed under the License is distributed on an \"AS IS\"", "mat1, mat2): out = ops.matmul(mat1, mat2) return mat * alpha + out *", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "License. # You may obtain a copy of the License at # #", "= dist + dist.T dist = self.addmm(dist, 1, -2, inputs, inputs.T) dist =", "import mindspore.nn as nn import mindspore.ops as ops import mindspore.numpy as np import", "positive/negative mining\"\"\" import mindspore.nn as nn import mindspore.ops as ops import mindspore.numpy as", "ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size,", "self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs)", "compliance with the License. 
# You may obtain a copy of the License", "# ============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\" import mindspore.nn as nn import", "def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_,", "= Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss", "= ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand =", "dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "mindspore.common.dtype as mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self,", "batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs,", "self.addmm(dist, 1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets =", "= np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T)", "xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap =", "as mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0,", "== 'sum': output = 
self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def", "target): output = np.maximum(0, -target*(input1 - input2) + self.margin) if self.reduction == 'mean':", "not use this file except in compliance with the License. # You may", "inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_) # (32,", "self.equal = ops.Equal() self.cast = ops.Cast() self.select = ops.Select() self.reducemax = ops.ReduceMax() self.reducemin", "dist = dist + dist.T dist = self.addmm(dist, 1, -2, inputs, inputs.T) dist", "import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction", "License, Version 2.0 (the \"License\"); # you may not use this file except", "= ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss =", "self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "hard positive/negative mining\"\"\" import mindspore.nn as nn import mindspore.ops as ops import mindspore.numpy", "# you may not use this file except in compliance with the License.", "ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin)", "ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros =", "agreed to in writing, software # distributed under the License is distributed on", "batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_", "self.expand(targets).T) 
dist_ap = self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an =", "= ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast() self.select = ops.Select() self.reducemax =", "(the \"License\"); # you may not use this file except in compliance with", "dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap", "self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ =", "__init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin = margin self.sum =", "# Unless required by applicable law or agreed to in writing, software #", "mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an =", "by applicable law or agreed to in writing, software # distributed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "alpha + out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\"", "margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow = ops.Pow() self.equal = ops.Equal() self.cast", "if self.reduction == 'mean': output = np.mean(output) elif self.reduction == 'sum': output =", "= self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_) # (32, 32)", "file except in compliance with the License. 
# You may obtain a copy", "def construct(self, input1, input2, target): output = np.maximum(0, -target*(input1 - input2) + self.margin)", "self.select = ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss", "construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_) #", "License for the specific language governing permissions and # limitations under the License.", "= self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y", "= addmm() self.pow = ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast() self.select =", "out = ops.matmul(mat1, mat2) return mat * alpha + out * beta class", "margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output = np.maximum(0, -target*(input1", "to in writing, software # distributed under the License is distributed on an", "implied. 
# See the License for the specific language governing permissions and #", "output = np.maximum(0, -target*(input1 - input2) + self.margin) if self.reduction == 'mean': output", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "+ self.margin) if self.reduction == 'mean': output = np.mean(output) elif self.reduction == 'sum':", "* beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\" def __init__(self, batch_size,", "self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size,", "0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta, mat1,", "============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\" import mindspore.nn as nn import mindspore.ops", "2) inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_) # (32, 32) dist =", "or implied. # See the License for the specific language governing permissions and", "Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\"", "margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False)", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist =", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "-2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32)", "\"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta, mat1, mat2): out = ops.matmul(mat1, mat2)", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'):", "in writing, software # distributed under the License is distributed on an \"AS", "loss with hard positive/negative mining\"\"\" import mindspore.nn as nn import mindspore.ops as ops", "'sum': output = self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self,", "= self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros)", "= ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss =", "= self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss = self.ranking_loss(dist_an,", "out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\" def __init__(self,", "class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): 
super(MarginRankingLoss, self).__init__() self.reduction = reduction", "self.reduction = reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2,", "ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self,", "class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss,", "= self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1)", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "= ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros", "you may not use this file except in compliance with the License. #", "inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask", "loss with hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm =", "hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow", "input1, input2, target): output = np.maximum(0, -target*(input1 - input2) + self.margin) if self.reduction", "input2) + self.margin) if self.reduction == 'mean': output = np.mean(output) elif self.reduction ==", "specific language governing permissions and # limitations under the License. # ============================================================================ \"\"\"Triplet", "use this file except in compliance with the License. 
# You may obtain", "= reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target):", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "the License. # ============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\" import mindspore.nn as", "== 'mean': output = np.mean(output) elif self.reduction == 'sum': output = self.sum(output, 0)", "2.0 (the \"License\"); # you may not use this file except in compliance", "ops import mindspore.numpy as np import mindspore.common.dtype as mstype from mindspore import Tensor", "self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32))", "with hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm()", "for the specific language governing permissions and # limitations under the License. #", "self.cast = ops.Cast() self.select = ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "self).__init__() self.reduction = reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1,", "self.expand(inputs_) # (32, 32) dist = dist + dist.T dist = self.addmm(dist, 1,", "MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin = margin", "# # Unless required by applicable law or agreed to in writing, software", "1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets,", "express or implied. 
# See the License for the specific language governing permissions", "beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard positive/negative mining\"\"\" def __init__(self, batch_size, margin=0.3):", "\"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin =", "= self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha,", "either express or implied. # See the License for the specific language governing", "= ops.Cast() self.select = ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum =", "nn import mindspore.ops as ops import mindspore.numpy as np import mindspore.common.dtype as mstype", "batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow = ops.Pow() self.equal = ops.Equal()", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs =", "2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version", "self.sum(inputs_, 1) dist = self.expand(inputs_) # (32, 32) dist = dist + dist.T", "\"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_)", "output = self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat,", "self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss = self.ranking_loss(dist_an, dist_ap,", "the License. 
# You may obtain a copy of the License at #", "= self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss = self.ranking_loss(dist_an, dist_ap, y) return loss", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "_addmm\"\"\" def construct(self, mat, alpha, beta, mat1, mat2): out = ops.matmul(mat1, mat2) return", "dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss = self.ranking_loss(dist_an, dist_ap, y) return", "Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License,", "mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__()", "self.reduction == 'mean': output = np.mean(output) elif self.reduction == 'sum': output = self.sum(output,", "output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta, mat1, mat2): out", "self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size))", "Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the", "dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets),", "mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros) mask_zeros = self.equal(self.cast(mask, mstype.int32),", "+ dist.T dist = self.addmm(dist, 1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12,", "= Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_ = self.pow(inputs,", "= ops.Equal() self.cast = ops.Cast() self.select = ops.Select() 
self.reducemax = ops.ReduceMax() self.reducemin =", "= ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output = np.maximum(0, -target*(input1 - input2)", "addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta, mat1, mat2): out = ops.matmul(mat1,", "with the License. # You may obtain a copy of the License at", "from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss,", "input2, target): output = np.maximum(0, -target*(input1 - input2) + self.margin) if self.reduction ==", "self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an,", "ops.Equal() self.cast = ops.Cast() self.select = ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin()", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "dist.T dist = self.addmm(dist, 1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)),", "__init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow = ops.Pow() self.equal =", "= np.maximum(0, -target*(input1 - input2) + self.margin) if self.reduction == 'mean': output =", "inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_) # (32, 32) dist = dist", "alpha, beta, mat1, mat2): out = ops.matmul(mat1, mat2) return mat * alpha +", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an = self.reducemin(dist_an, 1) y =", "return mat * alpha + out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with", "ops.Cast() self.select = 
ops.Select() self.reducemax = ops.ReduceMax() self.reducemin = ops.ReduceMin() self.sum = ops.ReduceSum(keep_dims=True)", "as ops import mindspore.numpy as np import mindspore.common.dtype as mstype from mindspore import", "targets = self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros)", "= ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32))", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "def construct(self, mat, alpha, beta, mat1, mat2): out = ops.matmul(mat1, mat2) return mat", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= self.addmm(dist, 1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype) targets", "addmm() self.pow = ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast() self.select = ops.Select()", "reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self, input1, input2, target): output", "super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def construct(self,", "self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets):", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "a copy of the License at # # 
http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "import mindspore.common.dtype as mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def", "1) dist = self.expand(inputs_) # (32, 32) dist = dist + dist.T dist", "self).__init__() self.addmm = addmm() self.pow = ops.Pow() self.equal = ops.Equal() self.cast = ops.Cast()", "self.cast(targets, mstype.float32) mask = self.equal(self.expand(targets), self.expand(targets).T) dist_ap = self.select(mask, dist, self.zeros) mask_zeros =", "with hard positive/negative mining\"\"\" import mindspore.nn as nn import mindspore.ops as ops import", "reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin = margin self.sum = ops.ReduceSum(keep_dims=False) def", "ops.ReduceSum(keep_dims=True) self.ranking_loss = MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs", "self.pow(inputs, 2) inputs_ = self.sum(inputs_, 1) dist = self.expand(inputs_) # (32, 32) dist", "permissions and # limitations under the License. # ============================================================================ \"\"\"Triplet loss with hard", "language governing permissions and # limitations under the License. 
# ============================================================================ \"\"\"Triplet loss", "mat * alpha + out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss with hard", "1) dist_an = self.reducemin(dist_an, 1) y = np.ones_like((dist_an)) loss = self.ranking_loss(dist_an, dist_ap, y)", "= np.mean(output) elif self.reduction == 'sum': output = self.sum(output, 0) return output class", "mat2) return mat * alpha + out * beta class TripletLoss(nn.Cell): \"\"\"Triplet loss", "dist = self.addmm(dist, 1, -2, inputs, inputs.T) dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype)", "as nn import mindspore.ops as ops import mindspore.numpy as np import mindspore.common.dtype as", "MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\" def __init__(self, margin=0.0, reduction='mean'): super(MarginRankingLoss, self).__init__() self.reduction = reduction self.margin", "beta, mat1, mat2): out = ops.matmul(mat1, mat2) return mat * alpha + out", "as np import mindspore.common.dtype as mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function", "= self.expand(inputs_) # (32, 32) dist = dist + dist.T dist = self.addmm(dist,", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. # You may obtain a copy of", "import mindspore.ops as ops import mindspore.numpy as np import mindspore.common.dtype as mstype from", "output = np.mean(output) elif self.reduction == 'sum': output = self.sum(output, 0) return output", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "- input2) + self.margin) if self.reduction == 'mean': output = np.mean(output) elif self.reduction", "np import mindspore.common.dtype as mstype from mindspore import Tensor class MarginRankingLoss(nn.Cell): \"\"\"function MarginRankingLoss\"\"\"", "License. # ============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\" import mindspore.nn as nn", "MarginRankingLoss(margin=margin) self.expand = ops.BroadcastTo((batch_size, batch_size)) self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size),", "class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta, mat1, mat2): out =", "self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap, 1) dist_an", "the specific language governing permissions and # limitations under the License. # ============================================================================", "batch_size)).astype(np.float32)) self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32)) def construct(self, inputs, targets): \"\"\"TripletLoss construct\"\"\" inputs_", "construct(self, input1, input2, target): output = np.maximum(0, -target*(input1 - input2) + self.margin) if", "mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros) dist_an = self.select(mask_zeros, dist, self.maxs) dist_ap = self.reducemax(dist_ap,", "np.maximum(0, -target*(input1 - input2) + self.margin) if self.reduction == 'mean': output = np.mean(output)", "Ltd # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "under the License. 
# ============================================================================ \"\"\"Triplet loss with hard positive/negative mining\"\"\" import mindspore.nn", "self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function _addmm\"\"\" def construct(self, mat, alpha, beta,", "# (32, 32) dist = dist + dist.T dist = self.addmm(dist, 1, -2,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "mining\"\"\" import mindspore.nn as nn import mindspore.ops as ops import mindspore.numpy as np", "elif self.reduction == 'sum': output = self.sum(output, 0) return output class addmm(nn.Cell): \"\"\"function", "= ops.matmul(mat1, mat2) return mat * alpha + out * beta class TripletLoss(nn.Cell):", "def __init__(self, batch_size, margin=0.3): super(TripletLoss, self).__init__() self.addmm = addmm() self.pow = ops.Pow() self.equal", "= self.sum(inputs_, 1) dist = self.expand(inputs_) # (32, 32) dist = dist +" ]
[ "gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader", "\"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises = analysis.exercises self.assertEqual(len(exercises),", "TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis =", "def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises = analysis.exercises self.assertEqual(len(exercises), 890)", "test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises = analysis.exercises self.assertEqual(len(exercises), 890) self.assertEqual(exercises[3].average_weight,", "class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis", "/ \"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises = analysis.exercises", "= Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises", "TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def", "Path from unittest import TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename =", "from unittest import TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent", "unittest import TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent /", "FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def 
test_loading_data(self): loader = FitbodLoader(self.data_filename)", "loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises = analysis.exercises self.assertEqual(len(exercises), 890) self.assertEqual(exercises[3].average_weight, 38.5)", "Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis exercises =", "from pathlib import Path from unittest import TestCase from gains.io import FitbodLoader class", "from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def test_loading_data(self):", "import Path from unittest import TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename", "data_filename = Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader = FitbodLoader(self.data_filename) analysis = loader.analysis", "import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\" def test_loading_data(self): loader =", "pathlib import Path from unittest import TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase):", "import TestCase from gains.io import FitbodLoader class TestFitbodLoader(TestCase): data_filename = Path(__file__).parent / \"data.csv\"" ]
[ "import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual(", "instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def", "test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 'second'),", "[('first', 'first'), ('second', 'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_'", "return 'Custom_' + choice.value, choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual(", "= EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first', 'first'), ('Custom_second', 'second'), ('Custom_third',", "choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first', 'first'), ('Custom_second', 'second'), ('Custom_third', 'third')] )", ".testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices()", "choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 'second'), ('third', 'third')] ) def", "EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first', 'first'), ('Custom_second', 'second'), ('Custom_third', 'third')]", "+ choice.value, choice.value instance = EnumChoiceField(CharTestEnum, 
choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first',", "'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value, choice.value", "choice.value, choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first', 'first'),", "('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value, choice.value instance", "def choice_builder(choice): return 'Custom_' + choice.value, choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices =", "test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value, choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices", "= EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 'second'), ('third', 'third')]", "EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 'second'), ('third', 'third')] )", "('second', 'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value,", "choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first', 'first'), ('Custom_second',", "django.test import TestCase from django_enum_choices.forms import EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase):", "CharTestEnum class 
FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices,", "instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices, [('Custom_first', 'first'), ('Custom_second', 'second'),", "from django.test import TestCase from django_enum_choices.forms import EnumChoiceField from .testapp.enumerations import CharTestEnum class", "class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first',", "FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'),", "'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value, choice.value instance =", "EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices", "'Custom_' + choice.value, choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices() self.assertEqual( choices,", "import EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum)", "django_enum_choices.forms import EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance =", "= instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 
'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self):", ") def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value, choice.value instance = EnumChoiceField(CharTestEnum,", "choices, [('first', 'first'), ('second', 'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return", "'first'), ('second', 'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' +", "TestCase from django_enum_choices.forms import EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self):", "self.assertEqual( choices, [('first', 'first'), ('second', 'second'), ('third', 'third')] ) def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice):", "choice_builder(choice): return 'Custom_' + choice.value, choice.value instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder) choices = instance.build_choices()", "from django_enum_choices.forms import EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance", "instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second', 'second'), ('third',", "import TestCase from django_enum_choices.forms import EnumChoiceField from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def", "def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self): def choice_builder(choice): return 'Custom_' + choice.value, choice.value instance = 
EnumChoiceField(CharTestEnum, choice_builder=choice_builder)", "def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices = instance.build_choices() self.assertEqual( choices, [('first', 'first'), ('second',", "from .testapp.enumerations import CharTestEnum class FormFieldTests(TestCase): def test_field_instance_creates_choices_correctly(self): instance = EnumChoiceField(CharTestEnum) choices =" ]
[ "the unique application key (``byte`` string), defaults to SSB's \"\"\" def __init__(self, local_key,", "the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is (a *", "details) # # Permission is hereby granted, free of charge, to any person", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "= (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a challenge to", "crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00'", "= crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed =", "= self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret)", "= ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return", "= None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret", "a challenge to be sent to the server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "this is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None):", "self.shared_hash = None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return 
{", "+ bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the", "of challenge sent from the client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key =", "# this is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self,", "will correspond to H = sign(A)[K | Bp | hash(a * b)] |", "generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 # return box(K | a", "hash(a * b)] # let's see if that signature can verify the reconstructed", "the Software without restriction, including without limitation the rights # to use, copy,", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "return box(K | a * b | a * B)[H] return crypto_box_afternm(self.hello, nonce,", "VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object): def", "sent_hmac, remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32]", "person obtaining a copy # of this software and associated documentation files (the", "(``byte`` string), defaults to SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto,", "our encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting", "super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B) a_bob =", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
import", "self.hello[:64], self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) #", "pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key +", "hash(K | a * b | a * B | A * b)", "server's public key (``byte`` string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key:", "| hash(a * b)] # let's see if that signature can verify the", "data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac", "hmac from base64 import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "= crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a * b) self.shared_hash", "= data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok =", "server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data):", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "= a_bob # this shall be hash(K | a * b | a", "the correctness of challenge sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey =", "= crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob # 
this shall be hash(K | a", "message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting server acceptance", "import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public import PrivateKey from", "side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto,", "included in all # copies or substantial portions of the Software. # #", "self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret +", "+ self.shared_secret + a_bob + b_alice).digest()[:32] return True def generate_accept(self): okay = self.local_key.sign(self.application_key", "return a challenge to be sent to the server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key)", "remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if ok: #", "= hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret +", "+ self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key", "self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key =", "def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(),", "is hereby granted, free of charge, to any person obtaining a copy #", 
"self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates", "persons to whom the Software is # furnished to do so, subject to", "correspond to H = sign(A)[K | Bp | hash(a * b)] | Ap", "conditions: # # The above copyright notice and this permission notice shall be", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "more details) # # Permission is hereby granted, free of charge, to any", "signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting server acceptance message')", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "documentation files (the \"Software\"), to deal # in the Software without restriction, including", "nonce = b\"\\x00\" * 24 # return box(K | a * b |", "to permit persons to whom the Software is # furnished to do so,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS", "nacl.public import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A", "self.shared_secret + self.a_bob + b_alice).digest() nonce = b\"\\x00\" * 24 try: # let's", "# let's use the box secret to unbox our encrypted message signature =", "self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the SHS client-side", "crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello", "None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the SHS", "hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(),", "of charge, to any person obtaining a copy # of this software and", "self.hello = message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" *", "exception.\"\"\" pass class SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key", "b_alice).digest() nonce = b\"\\x00\" * 24 try: # let's use the box secret", "(a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is", "\"\"\"Generate and return a challenge to be sent to the server.\"\"\" return self.local_app_hmac", "* 24 # return box(K | a * b | a * B)[H]", "if that signature can verify the reconstructed data on our side self.remote_pub_key.verify(self.application_key +", "should have received sign(B)[K | H | hash(a * b)] # let's see", "so, subject to the following 
conditions: # # The above copyright notice and", "the server's public key (``byte`` string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "a_bob # this shall be hash(K | a * b | a *", "used by the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public key (``byte``", "def verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the server.\"\"\" assert", "+ self.shared_secret + self.a_bob + b_alice).digest() nonce = b\"\\x00\" * 24 try: #", "crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the server's accept message is", "ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate())", "VerifyKey(public_key) # will raise an exception if verification fails pkey.verify(signed, signature) self.remote_pub_key =", "(crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public import PrivateKey from nacl.signing", "correctness of challenge sent from the client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key", "= crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this is hash(K | a *", "nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public import PrivateKey", "None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the SHS client-side crypto. :param", "class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the SHS client-side crypto. 
:param local_key:", "b_alice # this is hash(K | a * b | a * B", "pass class SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key =", "copy # of this software and associated documentation files (the \"Software\"), to deal", "generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' *", "b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this is hash(K |", "24 try: # let's use the box secret to unbox our encrypted message", "self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return", "to H = sign(A)[K | Bp | hash(a * b)] | Ap signed_message", "* b)] # let's see if that signature can verify the reconstructed data", "to the following conditions: # # The above copyright notice and this permission", "= hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret)", "is (a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this", "= self.remote_app_hmac == sent_hmac if ok: # this is (a * b) self.shared_secret", "clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key =", "the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed", "# return box(K | a * b | a * B)[H] return crypto_box_afternm(self.hello,", "= remote_ephemeral_key # this is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok", "signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) # will raise", "signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello", "def verify_server_accept(self, data): \"\"\"Verify that the server's accept message is sane\"\"\" curve_lkey =", "\"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key =", "unbox our encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error", "and associated documentation files (the \"Software\"), to deal # in the Software without", "signature, public_key = self.hello[:64], self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey", "b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest() nonce = b\"\\x00\"", "= application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key", "def verify_client_auth(self, data): assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret =", "self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret =", "24, 
self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice", "# and message_to_box will correspond to H = sign(A)[K | Bp | hash(a", "local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key", "| H | hash(a * b)] # let's see if that signature can", "except CryptoError: raise SHSError('Error decrypting server acceptance message') # we should have received", "True def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d = crypto_box_afternm(okay,", "application_key=None): self.local_key = local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def", "the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public key (``byte`` string) :param", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "| A * b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest()", "# this is (a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key", "if verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret", "+ self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.a_bob = None", "sublicense, and/or sell # copies of the Software, and to permit persons to", "Software is # furnished to do so, 
subject to the following conditions: #", "H = sign(A)[K | Bp | hash(a * b)] | Ap signed_message =", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "DEALINGS IN THE # SOFTWARE. import hashlib import hmac from base64 import b64decode", "hash(a * b)] | Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key,", "let's use the box secret to unbox our encrypted message signature = crypto_box_open_afternm(data,", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "data): \"\"\"Verify the correctness of challenge sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data)", "all # copies or substantial portions of the Software. # # THE SOFTWARE", "self.box_secret) except CryptoError: raise SHSError('Error decrypting server acceptance message') # we should have", "or PrivateKey.generate()) self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key = None def get_box_keys(self):", "self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest() nonce = b\"\\x00\" *", "b)] # let's see if that signature can verify the reconstructed data on", "message') # we should have received sign(B)[K | H | hash(a * b)]", "self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a * b)", "sent to the server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the", "to the server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness", "OR THE 
USE OR OTHER DEALINGS IN THE # SOFTWARE. import hashlib import", "= None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret':", "application_key: the unique application key (``byte`` string), defaults to SSB's \"\"\" def __init__(self,", "+ a_bob).digest() # and message_to_box will correspond to H = sign(A)[K | Bp", "server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B)", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "self.remote_app_hmac == sent_hmac if ok: # this is (a * b) self.shared_secret =", "= hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest() nonce = b\"\\x00\" * 24", "# copies of the Software, and to permit persons to whom the Software", "(A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this is", "= pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob", "24 # return box(K | a * b | a * B)[H] return", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "= local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key):", "64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac =", "the server's accept message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is (A", "= self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) # will raise an", "SHS exception.\"\"\" pass class SHSCryptoBase(object): def 
__init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key", "permission notice shall be included in all # copies or substantial portions of", ":param application_key: the unique application key (``byte`` string), defaults to SSB's \"\"\" def", "\"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key =", "* b | a * B | A * b) self.box_secret = hashlib.sha256(self.application_key", "the box secret to unbox our encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret)", "that signature can verify the reconstructed data on our side self.remote_pub_key.verify(self.application_key + self.hello", "box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash", "self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and", "is hash(K | a * b | a * B | A *", "= b\"\\x00\" * 24 # return box(K | a * b | a", "class SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key = application_key", "notice and this permission notice shall be included in all # copies or", "NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "+ self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d", "H | hash(a * b)] # let's see if that signature can verify", "self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d def clean(self, new_ephemeral_key=None):", "software and associated documentation files (the \"Software\"), to deal # in the Software", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce':", "reconstructed data on our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return True", "None self.shared_hash = None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return", "\"\"\"Verify the correctness of challenge sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey", "use the box secret to unbox our encrypted message signature = crypto_box_open_afternm(data, nonce,", "= signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\"", "encapsulates all the SHS client-side crypto. 
:param local_key: the keypair used by the", "public_key = self.hello[:64], self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey =", "* b | a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data):", "and to permit persons to whom the Software is # furnished to do", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "to SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key)", "server's accept message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is (A *", "the following conditions: # # The above copyright notice and this permission notice", "assert len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key,", "sign(A)[K | Bp | hash(a * b)] | Ap signed_message = self.local_key.sign(self.application_key +", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "# furnished to do so, subject to the following conditions: # # The", "raise an exception if verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice =", "the Software, and to permit persons to whom the Software is # furnished", "from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public import", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "* b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a", "* B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob # this shall be", "by the client 
(:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public key (``byte`` string)", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "defaults to SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key,", "= crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data,", "= self.hello[:64], self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key)", "local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's", "bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) # will raise an exception if verification", "SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key", "merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to", "* B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and message_to_box will", "crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public import PrivateKey from nacl.signing import", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", ":param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique application key (``byte``", "self.shared_secret + a_bob).digest() # and message_to_box will correspond to H = sign(A)[K |", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class 
SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None):", "to do so, subject to the following conditions: # # The above copyright", "APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object): def __init__(self,", "\"\"\"Verify the correctness of challenge sent from the client.\"\"\" assert len(data) == 64", "crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting server acceptance message') # we", "self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.a_bob = None self.b_alice", "def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 # return box(K |", "* B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the server's", "pkey = VerifyKey(public_key) # will raise an exception if verification fails pkey.verify(signed, signature)", "(a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob # this shall", "= hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None", "local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or", "\"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 # return box(K | a *", "the reconstructed data on our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return", "b_alice).digest()[:32] return True def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d", "self.b_alice = 
b_alice # this is hash(K | a * b | a", "this is (a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key #", "sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is", "whom the Software is # furnished to do so, subject to the following", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "contributors (see AUTHORS for more details) # # Permission is hereby granted, free", "# Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) # #", "| Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature +", "= crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key)", "b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a *", "free of charge, to any person obtaining a copy # of this software", "Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) # # Permission", "shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24]", "bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def", "from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey 
= self.remote_pub_key.to_curve25519_public_key() # a_bob is (a", ":class:`nacl.public.PrivateKey` :param application_key: the unique application key (``byte`` string), defaults to SSB's \"\"\"", "self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key),", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software,", "or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac =", "__init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def", "Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key)", "# b_alice is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice", "crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed = self.application_key", "without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense,", "is # furnished to do so, subject to the following conditions: # #", "Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob # this", "VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the server.\"\"\"", "= self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello =", "server_pub_key: the server's public key (``byte`` string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey`", "+ self.a_bob + b_alice).digest() nonce = b\"\\x00\" * 24 try: # let's use", "| Bp | hash(a * b)] | Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key)", "secret to unbox our encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError:", "from base64 import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import", "to deal # in the Software without restriction, including without limitation the rights", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
import hashlib", "to any person obtaining a copy # of this software and associated documentation", "hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data):", "PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32])", "and message_to_box will correspond to H = sign(A)[K | Bp | hash(a *", "application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac", "bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the client.\"\"\"", "ok = self.remote_app_hmac == sent_hmac if ok: # this is (a * b)", "in all # copies or substantial portions of the Software. 
# # THE", "b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32]", "have received sign(B)[K | H | hash(a * b)] # let's see if", "= message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "* b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest() nonce =", "import CryptoError from nacl.public import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY>", "112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello", "= self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob", "correctness of challenge sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key()", "+ a_bob + b_alice).digest()[:32] return True def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "* b | a * B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest()", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key),", "return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash =", "hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class", "'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) == 112 a_bob", "ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge", "# SOFTWARE. 
import hashlib import hmac from base64 import b64decode from nacl.bindings import", "(c) 2017 PySecretHandshake contributors (see AUTHORS for more details) # # Permission is", "Software, and to permit persons to whom the Software is # furnished to", "bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] return True def", "'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] }", "def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def", "that the server's accept message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is", "decrypting server acceptance message') # we should have received sign(B)[K | H |", "crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this is hash(K | a * b", "keypair used by the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public key", "challenge sent from the client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32],", "bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return True", "this software and associated documentation files (the \"Software\"), to deal # in the", "b_alice is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice #", "sent_hmac if ok: # this is (a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key)", "data): \"\"\"Verify that the server's 
accept message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() #", "OTHER DEALINGS IN THE # SOFTWARE. import hashlib import hmac from base64 import", "our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None):", "a_bob).digest() # and message_to_box will correspond to H = sign(A)[K | Bp |", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "self.hello + self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.a_bob =", "granted, free of charge, to any person obtaining a copy # of this", "# a_bob is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob", "== 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest()", "import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object):", "bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a challenge to be sent", "b'\\x00' * 24, box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed = self.application_key +", "application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge sent", "object) :param server_pub_key: the server's public key (``byte`` string) :param ephemeral_key: a fresh", "import hmac from base64 import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from", "furnished to do so, subject to the following conditions: # # The above", "and this permission notice shall be included 
in all # copies or substantial", "modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "that encapsulates all the SHS client-side crypto. :param local_key: the keypair used by", "b\"\\x00\" * 24 # return box(K | a * b | a *", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "# Permission is hereby granted, free of charge, to any person obtaining a", "hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature,", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "box(K | a * b | a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret)", "verify_client_auth(self, data): assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key", "on our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return True def clean(self,", "ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash = None", "publish, distribute, sublicense, and/or sell # copies of the Software, and to permit", "= hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if", "| a * b | a * B | A * b) self.box_secret", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if ok: # this", "ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique application key (``byte`` string),", "'decrypt_key': hashlib.sha256(shared_secret + 
bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self,", "client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public key (``byte`` string) :param ephemeral_key:", "THE # SOFTWARE. import hashlib import hmac from base64 import b64decode from nacl.bindings", "message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is (A * b) b_alice", "PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\"", "self.shared_hash pkey = VerifyKey(public_key) # will raise an exception if verification fails pkey.verify(signed,", "client-side crypto. :param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object) :param", "h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if ok: # this is (a *", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "verify the reconstructed data on our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature)", "= hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] return True def generate_accept(self): okay", "without restriction, including without limitation the rights # to use, copy, modify, merge,", "class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key)", "len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512')", "data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac ==", "== sent_hmac if ok: # this is (a * b) 
self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key),", "CryptoError: raise SHSError('Error decrypting server acceptance message') # we should have received sign(B)[K", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import hashlib import hmac", "APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key,", "None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret,", "self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return", "self.a_bob = a_bob # this shall be hash(K | a * b |", "+ b_alice).digest()[:32] return True def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature", "# this is hash(K | a * b | a * B |", "AUTHORS for more details) # # Permission is hereby granted, free of charge,", "in the Software without restriction, including without limitation the rights # to use,", "a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob # this shall be hash(K |", "return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness of challenge sent", "verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret =", "or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512')", "a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), 
self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello =", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the SHS client-side crypto. :param local_key: the", "the keypair used by the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public", "self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self, ephemeral_key): self.local_ephemeral_key =", "this is hash(K | a * b | a * B | A", "server acceptance message') # we should have received sign(B)[K | H | hash(a", "return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None", "copies of the Software, and to permit persons to whom the Software is", "+ self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return True def", "a * B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and message_to_box", "signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce", "public key (``byte`` string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key: the", "is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is (A * b) b_alice =", "B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the server's accept", "a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the", "accept message is sane\"\"\" curve_lkey = 
self.local_key.to_curve25519_private_key() # b_alice is (A * b)", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] return True", "self.a_bob + b_alice).digest() nonce = b\"\\x00\" * 24 try: # let's use the", "remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest()", "| a * B | A * b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret", "received sign(B)[K | H | hash(a * b)] # let's see if that", "notice shall be included in all # copies or substantial portions of the", "exception if verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key()))", "'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) ==", "True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 # return box(K", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key = application_key or APPLICATION_KEY", "key (``byte`` string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique", "| a * b | a * B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL", "super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that", "b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this is hash(K | a", "digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a challenge to be sent to", "\"\"\"An object that encapsulates all the SHS client-side crypto. :param local_key: the keypair", "message_to_box will correspond to H = sign(A)[K | Bp | hash(a * b)]", "shall be included in all # copies or substantial portions of the Software.", "let's see if that signature can verify the reconstructed data on our side", "= b\"\\x00\" * 24 try: # let's use the box secret to unbox", "self.remote_ephemeral_key = None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key':", "nacl.exceptions import CryptoError from nacl.public import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY =", "The above copyright notice and this permission notice shall be included in all", "and/or sell # copies of the Software, and to permit persons to whom", "self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d def", "+ b_alice).digest() nonce = b\"\\x00\" * 24 try: # let's use the box", "crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key = remote_ephemeral_key # this is hash(a * b) self.shared_hash =", "None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret +", "+ bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': 
self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase):", "box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 # return box(K | a * b", "# in the Software without restriction, including without limitation the rights # to", "OR OTHER DEALINGS IN THE # SOFTWARE. import hashlib import hmac from base64", "self.local_key = local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key or PrivateKey.generate()) def _reset_keys(self,", "a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key = self.hello[:64], self.hello[64:]", "b\"\\x00\" * 24 try: # let's use the box secret to unbox our", "data): \"\"\"Verify the correctness of challenge sent from the client.\"\"\" assert len(data) ==", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "unique application key (``byte`` string), defaults to SSB's \"\"\" def __init__(self, local_key, server_pub_key,", "| a * B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and", "self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob =", "acceptance message') # we should have received sign(B)[K | H | hash(a *", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key)", "local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self,", "encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting 
server", "ok: # this is (a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key =", "nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting server acceptance message') # we should", ".digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a challenge to be sent to the", "key (``byte`` string), defaults to SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None):", "return True def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d =", "hash(K | a * b | a * B) self.box_secret = hashlib.sha256(self.application_key +", "import hashlib import hmac from base64 import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm,", "any person obtaining a copy # of this software and associated documentation files", "self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) # will raise an exception", "self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness of challenge sent from", "# # The above copyright notice and this permission notice shall be included", "crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public import PrivateKey from nacl.signing import VerifyKey", "+ bytes(self.local_key.verify_key) self.hello = message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce =", "\"Software\"), to deal # in the Software without restriction, including without limitation the", "SHSError('Error decrypting server acceptance message') # we should have received sign(B)[K | H", "shall be hash(K | a * b | a * B) self.box_secret =", "string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique application key", "= sign(A)[K 
| Bp | hash(a * b)] | Ap signed_message = self.local_key.sign(self.application_key", "box secret to unbox our encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "a copy # of this software and associated documentation files (the \"Software\"), to", "deal # in the Software without restriction, including without limitation the rights #", "+ a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24, box_secret) signature, public_key = self.hello[:64],", "B | A * b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob +", "self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) # will", "try: # let's use the box secret to unbox our encrypted message signature", "len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret +", "pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob +", "PrivateKey.generate()) self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object", "(the \"Software\"), to deal # in the Software without restriction, including without limitation", "B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob 
= a_bob # this shall be hash(K", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "+ bytes(self.local_key.verify_key) + self.shared_hash pkey = VerifyKey(public_key) # will raise an exception if", "box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello = crypto_box_open_afternm(data, b'\\x00' * 24,", "+ self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d def clean(self,", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "b'\\x00' * 24, self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello =", "self.hello = None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all", "charge, to any person obtaining a copy # of this software and associated", "ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate", "import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from", "from nacl.exceptions import CryptoError from nacl.public import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY", "clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO", "+ self.hello + self.shared_hash, signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.a_bob", "nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class", "hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or", "message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return True def generate_client_auth(self): \"\"\"Generate", "curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey))", "bytes(curve_pkey)) self.a_bob = a_bob # this shall be hash(K | a * b", "+ bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "challenge sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob", "# we should have received sign(B)[K | H | hash(a * b)] #", "fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique application key (``byte`` string), defaults to", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError from nacl.public", "to whom the Software is # furnished to do so, subject to the", "limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or", "be sent to the server.\"\"\" 
return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify", "bytes(self.local_key.verify_key) self.hello = message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\"", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "be included in all # copies or substantial portions of the Software. #", ":param server_pub_key: the server's public key (``byte`` string) :param ephemeral_key: a fresh local", "\"\"\"Verify that the server's accept message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice", "base64 import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult) from nacl.exceptions import CryptoError", "= crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise SHSError('Error decrypting server acceptance message') #", "| hash(a * b)] | Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash)", "self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice =", "a_bob is (a * B) a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob #", "a * b | a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self,", "assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret", "= VerifyKey(public_key) # will raise an exception if verification fails pkey.verify(signed, signature) self.remote_pub_key", "is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): 
self._reset_keys(new_ephemeral_key", "def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key", "copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED", "def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None class SHSClientCrypto(SHSCryptoBase):", "= b_alice # this is hash(K | a * b | a *", "the SHS client-side crypto. :param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey`", "SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key", "message_to_box return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 #", "* 24, self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None", "d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.hello = None self.b_alice = None class", "portions of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "+ bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert", "self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) == 112", "do so, subject to the following conditions: # # The above copyright notice", "* b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate())", "# this shall be hash(K | a * b | a * B)", "b | a * B | A * b) self.box_secret = hashlib.sha256(self.application_key +", "sent from the client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:]", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest() nonce = b\"\\x00\" * 24 try:", "permit persons to whom the Software is # furnished to do so, subject", "get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key':", "return True def generate_client_auth(self): \"\"\"Generate box[K|a*b|a*B](H)\"\"\" nonce = b\"\\x00\" * 24 # return", "| a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that", "Permission is hereby granted, free of charge, to any person obtaining a copy", "to unbox our encrypted message signature = crypto_box_open_afternm(data, nonce, self.box_secret) except CryptoError: raise", "24, box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed = self.application_key + 
bytes(self.local_key.verify_key) +", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "CryptoError from nacl.public import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class", "_reset_keys(self, ephemeral_key): self.local_ephemeral_key = ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self):", "a fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique application key (``byte`` string), defaults", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "verify_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the client.\"\"\" assert len(data)", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "Software without restriction, including without limitation the rights # to use, copy, modify,", "will raise an exception if verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice", "self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] return True def generate_accept(self):", "verify_server_accept(self, data): \"\"\"Verify that the server's accept message is sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key()", "the correctness of challenge sent from the client.\"\"\" assert len(data) == 64 sent_hmac,", "h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac == sent_hmac", "b | a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify", "# The above copyright notice and this permission notice shall be included in", "self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() self.hello = 
crypto_box_open_afternm(data, b'\\x00' *", "# of this software and associated documentation files (the \"Software\"), to deal #", "SOFTWARE. import hashlib import hmac from base64 import b64decode from nacl.bindings import (crypto_box_afternm,", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce':", "above copyright notice and this permission notice shall be included in all #", "hashlib import hmac from base64 import b64decode from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, crypto_scalarmult)", "sell # copies of the Software, and to permit persons to whom the", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] return True def generate_accept(self): okay =", "substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\",", "hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash", "of challenge sent from the server.\"\"\" assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() #", "self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if ok: # this is", "__init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key = application_key or APPLICATION_KEY self._reset_keys(ephemeral_key", "def generate_challenge(self): \"\"\"Generate and return a challenge to be sent to the server.\"\"\"", "signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret", "hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if ok:", "if ok: # this is (a * b) self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) self.remote_ephemeral_key", "curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key)", "return crypto_box_afternm(self.hello, nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the server's accept message", "IN THE # SOFTWARE. 
import hashlib import hmac from base64 import b64decode from", "ephemeral_key self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a", "self.shared_secret + a_bob + b_alice).digest()[:32] return True def generate_accept(self): okay = self.local_key.sign(self.application_key +", "* B | A * b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob", "a * b | a * B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret +", "* 24, box_secret) signature, public_key = self.hello[:64], self.hello[64:] signed = self.application_key + bytes(self.local_key.verify_key)", "to be sent to the server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data):", "restriction, including without limitation the rights # to use, copy, modify, merge, publish,", "import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS", "nonce, self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the server's accept message is sane\"\"\"", ":param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key: the", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS", "# # Permission is hereby granted, free of charge, to any person obtaining", "b)] | Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature", "data): assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret = hashlib.sha256(self.application_key +", "(``byte`` string) :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` :param application_key: the unique application", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "this permission notice shall be included in all # copies or substantial portions", "b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret", "* b)] | Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box =", "return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.a_bob = None self.b_alice = None", "+ self.shared_secret + a_bob).digest() # and message_to_box will correspond to H = sign(A)[K", "USE OR OTHER DEALINGS IN THE # SOFTWARE. import hashlib import hmac from", "= None self.b_alice = None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "# copies or substantial portions of the Software. 
# # THE SOFTWARE IS", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00'", "= self.local_key.to_curve25519_private_key() # b_alice is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice", "def verify_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the client.\"\"\" assert", "self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and message_to_box will correspond to", "files (the \"Software\"), to deal # in the Software without restriction, including without", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness of challenge", "from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass", "this shall be hash(K | a * b | a * B) self.box_secret", "signature) return True def clean(self, new_ephemeral_key=None): super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) self.a_bob = None self.b_alice =", "{ 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24],", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge sent from", "okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature d = crypto_box_afternm(okay, b'\\x00' * 24,", "the server.\"\"\" 
return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self, data): \"\"\"Verify the correctness of", "def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key)", "* 24 try: # let's use the box secret to unbox our encrypted", "the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a challenge", "the client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h =", "following conditions: # # The above copyright notice and this permission notice shall", "of the Software, and to permit persons to whom the Software is #", "all the SHS client-side crypto. :param local_key: the keypair used by the client", "can verify the reconstructed data on our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash,", "for more details) # # Permission is hereby granted, free of charge, to", "= hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and message_to_box will correspond to H", "fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key", "verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the server.\"\"\" assert super(SHSClientCrypto,", "string), defaults to SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key,", "see if that signature can verify the reconstructed data on our side 
self.remote_pub_key.verify(self.application_key", "} class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()),", "= None self.shared_hash = None self.remote_ephemeral_key = None def get_box_keys(self): shared_secret = hashlib.sha256(self.box_secret).digest()", "self.local_key.to_curve25519_private_key() # b_alice is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice =", "(see AUTHORS for more details) # # Permission is hereby granted, free of", "an exception if verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key),", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "sane\"\"\" curve_lkey = self.local_key.to_curve25519_private_key() # b_alice is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey),", "crypto. 
:param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object) :param server_pub_key:", "self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box return True def generate_client_auth(self):", "we should have received sign(B)[K | H | hash(a * b)] # let's", "self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) == 112 a_bob =", "is (A * b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this", "self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def clean(self, new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret =", "SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data) == 112 a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) box_secret", "sign(B)[K | H | hash(a * b)] # let's see if that signature", "a * B | A * b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret +", "nonce = b\"\\x00\" * 24 try: # let's use the box secret to", "PySecretHandshake contributors (see AUTHORS for more details) # # Permission is hereby granted,", "from the client.\"\"\" assert len(data) == 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h", "B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and message_to_box will correspond", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "= VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness of challenge sent from the", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "challenge to be sent to the server.\"\"\" return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) def verify_challenge(self,", "including 
without limitation the rights # to use, copy, modify, merge, publish, distribute,", "from nacl.public import PrivateKey from nacl.signing import VerifyKey APPLICATION_KEY = b64decode('<KEY> class SHSError(Exception):", "remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac = h.digest()[:32] ok", "self.box_secret) def verify_server_accept(self, data): \"\"\"Verify that the server's accept message is sane\"\"\" curve_lkey", "super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness", "b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None,", "(:class:`nacl.public.PrivateKey` object) :param server_pub_key: the server's public key (``byte`` string) :param ephemeral_key: a", "application key (``byte`` string), defaults to SSB's \"\"\" def __init__(self, local_key, server_pub_key, ephemeral_key,", "application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the", "be hash(K | a * b | a * B) self.box_secret = hashlib.sha256(self.application_key", "= None class SHSClientCrypto(SHSCryptoBase): \"\"\"An object that encapsulates all the SHS client-side crypto.", "b | a * B) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() #", "remote_ephemeral_key # this is hash(a * b) self.shared_hash = hashlib.sha256(self.shared_secret).digest() return ok def", "| a * b | a * B)[H] return crypto_box_afternm(self.hello, nonce, self.box_secret) def", "copyright notice and this permission notice shall be included in all # copies", "signature can verify the reconstructed data on our 
side self.remote_pub_key.verify(self.application_key + self.hello +", "self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key = None def", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "new_ephemeral_key=None): self._reset_keys(new_ephemeral_key or PrivateKey.generate()) self.shared_secret = None self.shared_hash = None self.remote_ephemeral_key = None", "generate_challenge(self): \"\"\"Generate and return a challenge to be sent to the server.\"\"\" return", "shared_secret = hashlib.sha256(self.box_secret).digest() return { 'shared_secret': shared_secret, 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 'decrypt_key': hashlib.sha256(shared_secret", "+ self.shared_hash pkey = VerifyKey(public_key) # will raise an exception if verification fails", "assert super(SHSClientCrypto, self).verify_challenge(data) curve_pkey = self.remote_pub_key.to_curve25519_public_key() # a_bob is (a * B) a_bob", "associated documentation files (the \"Software\"), to deal # in the Software without restriction,", "hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() # and message_to_box will correspond to H =", "d = crypto_box_afternm(okay, b'\\x00' * 24, self.box_secret) return d def clean(self, new_ephemeral_key=None): super(SHSServerCrypto,", "= h.digest()[:32] ok = self.remote_app_hmac == sent_hmac if ok: # this is (a", "self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify the correctness of", "crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) self.a_bob = a_bob # this shall be hash(K | a *", "hereby granted, free of charge, to any person obtaining a copy # of", "of this software and associated documentation files (the \"Software\"), to deal # in", "# let's see if that signature can verify the 
reconstructed data on our", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "a_bob + b_alice).digest()[:32] return True def generate_accept(self): okay = self.local_key.sign(self.application_key + self.hello +", "2017 PySecretHandshake contributors (see AUTHORS for more details) # # Permission is hereby", "SHSCryptoBase(object): def __init__(self, local_key, ephemeral_key=None, application_key=None): self.local_key = local_key self.application_key = application_key or", "object that encapsulates all the SHS client-side crypto. :param local_key: the keypair used", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "and return a challenge to be sent to the server.\"\"\" return self.local_app_hmac +", "= crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(self.remote_pub_key.to_curve25519_public_key())) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] return", "ephemeral_key, application_key=None): super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) self.remote_pub_key = VerifyKey(server_pub_key) def verify_server_challenge(self, data): \"\"\"Verify", "= b64decode('<KEY> class SHSError(Exception): \"\"\"A SHS exception.\"\"\" pass class SHSCryptoBase(object): def __init__(self, local_key,", "# will raise an exception if verification fails pkey.verify(signed, signature) self.remote_pub_key = pkey", "Bp | hash(a * b)] | Ap signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) +", "raise SHSError('Error decrypting server acceptance message') # we should have received sign(B)[K |", "SHS client-side crypto. 
:param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object)", "(hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') .digest()[:32]) def generate_challenge(self): \"\"\"Generate and return a challenge to be", "a * b | a * B | A * b) self.box_secret =", "* b) b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) self.b_alice = b_alice # this is hash(K", "bytes(self.local_key.verify_key)).digest(), 'encrypt_nonce': self.remote_app_hmac[:24], 'decrypt_nonce': self.local_app_hmac[:24] } class SHSServerCrypto(SHSCryptoBase): def verify_client_auth(self, data): assert len(data)", "local :class:`nacl.public.PrivateKey` :param application_key: the unique application key (``byte`` string), defaults to SSB's", "data on our side self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) return True def", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "== 64 sent_hmac, remote_ephemeral_key = data[:32], data[32:] h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') self.remote_app_hmac", "self.remote_ephemeral_key) self.b_alice = b_alice # this is hash(K | a * b |", "A * b) self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest() nonce", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of", "self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) message_to_box = signed_message.signature + bytes(self.local_key.verify_key) self.hello = message_to_box", "the Software is # furnished to do so, subject to the following conditions:", "subject to the following conditions: # # The above copyright notice and this" ]
[ "load_file def dummy_comp(trj1, trj2): return True class Trajectories: def __init__(self): self.nframes = 0", "thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2): return True class Trajectories: def __init__(self):", "np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj == (self.ntrjs - 1) and max_frame ==", "supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format == \"xyz\": for trj", "name = f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name not in self.alltrjs: name", "self.in_trj_id[count : count + nframes] += np.arange(nframes) count += nframes def __getitem__(self, key):", "accept_id=None): if accept_id is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id]", "frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys))", "count += nframes def __getitem__(self, key): return self.alltrjs[key] def __iter__(self): return self def", "ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id]", "accept_id is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id =", "None and trj.name not in self.alltrjs: name = trj.name elif name is None:", "(Harvard University) 2020 \"\"\" from copy import deepcopy import logging import numpy as", "0 for id_trj, trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count : count +", "label = name + \"_\" + label if stored_label not in self.alltrjs: newtrj", "# order trj by element order, label = species_to_order_label(trj.species) if name is None:", "self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self): raise 
StopIteration self._iter_index +=", "= self.alltrjs.pop(name, None) if trj is not None: self.nframes -= trj.nframes self.ntrjs -=", "trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp):", "None and not force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj ==", "enforced_format=format, ) if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict,", "+= 1 return # order trj by element order, label = species_to_order_label(trj.species) if", "\"npz\"] or format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format,", "+ nframes] += id_trj self.in_trj_id[count : count + nframes] += np.arange(nframes) count +=", "@classmethod def from_file(cls, name: str, format: str = None, preserve_order: bool = False):", "False): \"\"\" pickle format: previous objects saved as pickle format \"\"\" obj =", "\"_iter_index\", 0) if self._iter_index >= len(self): raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index", "def to_dict(self): return {name: trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod def from_file(cls,", "len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not merge: if", "f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs += 1 return # order", "= np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj == (self.ntrjs - 1) and max_frame", "= len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\")", "print(self.per_frame_attrs) 
print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not merge: if name in self.alltrjs:", "__len__(self): return self.nframes def construct_id_list(self, force_run=False): if self.trj_id is not None and not", "oldtrj): logging.debug( f\"! Metadata is exactly the same. Merge to {stored_label}\" ) else:", "count = -1 # find all the previous trajectories for l in alldata:", "<NAME> (Harvard University) 2020 \"\"\" from copy import deepcopy import logging import numpy", "metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms", "set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values():", "self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format)", "preserve_order=False): \"\"\" convert dictionary to a Trajectory instance \"\"\" trjs = Trajectories() for", "str, format: str = None): if format in [\"pickle\", \"npz\"] or format is", "return self.alltrjs[key] def __iter__(self): return self def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0)", ">= len(self): raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index - 1) def get_frame(self,", "2020 \"\"\" from copy import deepcopy import logging import numpy as np from", "is None and trj.name not in self.alltrjs: name = trj.name elif name is", "preserve_order: count = -1 # find all the previous trajectories for l in", "None self.in_trj_id = None self.global_id = None def __repr__(self) -> str: return f\"Trajectories", "= 0 for id_trj, trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count : count", "from_file(cls, name: str, format: str = None, preserve_order: bool = False): \"\"\" pickle", "str: return 
f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories with", "trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod def from_file(cls, name: str, format: str", "thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2): return True", "= len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not", "\"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj))", "= self.global_id[accept_id] def save(self, name: str, format: str = None): if format in", "in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label == label:", "save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs += 1 return stored_label def add_trjs( self,", "in l: line_split = l.split(\"_\") else: line_split = l if label == line_split[0]:", "return f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)}", "not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order)", "for name in self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def", "trj.name not in self.alltrjs: name = trj.name elif name is None: name =", "save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms =", "import species_to_order_label from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2): return True class", "<reponame>nw13slx/thyme \"\"\" Data structure that contains 
a collection of trajectory objects <NAME> (Harvard", "trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs", "s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return self.nframes def construct_id_list(self, force_run=False): if", "merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label =", "enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if attr is None: raise ValueError(f\"not all", "else: line_split = l if label == line_split[0]: _count = int(line_split[1]) if _count", "trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array] if len(array[0].shape) <= 1:", "= Trajectories() for name, trj_dict in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict =", "preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is not the same. 
Not", "trjs.construct_id_list() return trjs def pop_trj(self, name): trj = self.alltrjs.pop(name, None) if trj is", "{n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id]", "is not None: self.nframes -= trj.nframes self.ntrjs -= 1 def add_trj( self, trj,", "1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes)", "not None: self.nframes -= trj.nframes self.ntrjs -= 1 def add_trj( self, trj, name=None,", "def from_file(cls, name: str, format: str = None, preserve_order: bool = False): \"\"\"", "+ \"_\" + label label = name + \"_\" + label if stored_label", "species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label == label: stored_label = _label break if", "self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self,", "if trj is not None: self.nframes -= trj.nframes self.ntrjs -= 1 def add_trj(", "for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs", "trj.nframes self.ntrjs += 1 return # order trj by element order, label =", "str = None, preserve_order: bool = False): \"\"\" pickle format: previous objects saved", "stored_label = None for _label, oldtrj in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if", "overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name =", "1 return # order trj by element order, label = species_to_order_label(trj.species) if name", "f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name in self.alltrjs: s += f\"----{name}----\\n\" s", "enough 
per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False,", "dummy_comp(trj1, trj2): return True class Trajectories: def __init__(self): self.nframes = 0 self.ntrjs =", "= None, preserve_order: bool = False): \"\"\" pickle format: previous objects saved as", "filename=name, enforced_format=format, ) elif format == \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format)", "order trj by element order, label = species_to_order_label(trj.species) if name is None: stored_label", "_label break if stored_label is None: stored_label = label else: stored_label = name", "(self.ntrjs - 1) and max_frame == (self.nframes - 1): return self.trj_id = np.zeros(self.nframes,", "poscar\" ) logging.info(f\"save as {name}\") def to_dict(self): return {name: trj.to_dict() for name, trj", "\"_\" in l: line_split = l.split(\"_\") else: line_split = l if label ==", "self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs", "dictionary to a Trajectory instance \"\"\" trjs = Trajectories() for name, trj_dict in", "nframes def __getitem__(self, key): return self.alltrjs[key] def __iter__(self): return self def __next__(self): self._iter_index", "old_label == label: stored_label = _label break if stored_label is None: stored_label =", "= getattr(trj, key, None) if attr is None: raise ValueError(f\"not all trjs has", "preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True,", "alldata: if \"_\" in l: line_split = l.split(\"_\") else: line_split = l if", "int(line_split[1]) if _count > count: count = _count if label != last_label: count", "raise NotImplementedError( f\"Output format {format} not supported:\" f\" try from pickle, xyz, poscar\"", "pickle format 
\"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, )", "== (self.ntrjs - 1) and max_frame == (self.nframes - 1): return self.trj_id =", "len(array[0].shape) <= 1: array = np.hstack(array) else: array = np.vstack(array) return array[self.global_id] def", "in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array] if len(array[0].shape) <= 1: array", "def pop_trj(self, name): trj = self.alltrjs.pop(name, None) if trj is not None: self.nframes", "s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return self.nframes def", "save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format == \"xyz\":", "max_frame = np.max(self.global_id) if max_trj == (self.ntrjs - 1) and max_frame == (self.nframes", "if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False):", "trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs =", "trj.name elif name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes +=", "else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! 
Metadata is exactly the", "None: raise ValueError(f\"not all trjs has attr {attr}\") array = [] for id_trj,", "Trajectories: def __init__(self): self.nframes = 0 self.ntrjs = 0 self.alltrjs = {} self._iter_index", "+= f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return self.nframes def construct_id_list(self, force_run=False): if self.trj_id", "stored_label = name + \"_\" + label label = name + \"_\" +", "= label else: stored_label = name + \"_\" + label label = name", "as pickle format \"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format,", "force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj == (self.ntrjs - 1)", "format in [\"pickle\", \"npz\"] or format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\":", "if stored_label is None: stored_label = label else: stored_label = name + \"_\"", "== \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format", "label if stored_label not in self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label]", "collection of trajectory objects <NAME> (Harvard University) 2020 \"\"\" from copy import deepcopy", "self.trj_id = None self.in_trj_id = None self.global_id = None def __repr__(self) -> str:", "elif format == \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError(", "= self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name: str, format: str = None):", "trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for trj in self.alltrjs.values():", ") if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False,", "np.arange(self.nframes) count = 0 for 
id_trj, trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count", "saved as pickle format \"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name,", "= name + \"_\" + label label = name + \"_\" + label", "trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name):", "enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array] if len(array[0].shape) <= 1: array =", "is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format", "a collection of trajectory objects <NAME> (Harvard University) 2020 \"\"\" from copy import", "None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format ==", "max_trj == (self.ntrjs - 1) and max_frame == (self.nframes - 1): return self.trj_id", "def include_frames(self, accept_id=None): if accept_id is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id", "name): trj = self.alltrjs.pop(name, None) if trj is not None: self.nframes -= trj.nframes", "the same. 
Merge to {stored_label}\" ) else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label,", "= set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if", "+ label if stored_label not in self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label)", "self.nframes = 0 self.ntrjs = 0 self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs", "if stored_label not in self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] =", "trajectories for l in alldata: if \"_\" in l: line_split = l.split(\"_\") else:", "supported:\" f\" try from pickle, xyz, poscar\" ) logging.info(f\"save as {name}\") def to_dict(self):", "in self.alltrjs: name = trj.name elif name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name]", "str = None): if format in [\"pickle\", \"npz\"] or format is None: save_file(", "= name + \"_\" + label if stored_label not in self.alltrjs: newtrj =", "and old_label == label: stored_label = _label break if stored_label is None: stored_label", "_count > count: count = _count if label != last_label: count += 1", "elif format == \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format ==", "trj by element order, label = species_to_order_label(trj.species) if name is None: stored_label =", "def get_frame(self, idx, keys=None): n_attrs = len(self) if idx >= n_attrs: raise ValueError(f\"frame", "sub_array = trj.get_attr(key) array += [sub_array] if len(array[0].shape) <= 1: array = np.hstack(array)", "{len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name", "get_frame(self, idx, keys=None): n_attrs = len(self) if idx >= n_attrs: raise ValueError(f\"frame index", "newtrj = Trajectory() newtrj.name = np.copy(stored_label) 
self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label]", "University) 2020 \"\"\" from copy import deepcopy import logging import numpy as np", "not None and not force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj", "format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif", "= Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name): trj", "intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\")", "id_trj, trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count : count + nframes] +=", "id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if attr is None:", "Trajectories() for name, trj_dict in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict = trj_dict.item()", "objects <NAME> (Harvard University) 2020 \"\"\" from copy import deepcopy import logging import", "def get_attrs(self, key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key,", "format == \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\":", "metadata_compare=dummy_comp): trjs = Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order,", "= species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label == label: stored_label = _label break", "= load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return", "trajectories with {len(self)} 
frames\\n\" for name in self.alltrjs: s += f\"----{name}----\\n\" s +=", "np.hstack(array) else: array = np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if accept_id is", "self.trj_id[count : count + nframes] += id_trj self.in_trj_id[count : count + nframes] +=", "0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if", "format) else: raise NotImplementedError( f\"Output format {format} not supported:\" f\" try from pickle,", "{stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs += 1 return stored_label", "is not None and not force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if", "self._iter_index >= len(self): raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index - 1) def", "for id_trj, trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count : count + nframes]", "supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return obj return", "if _count > count: count = _count if label != last_label: count +=", "f\"Output format {format} not supported:\" f\" try from pickle, xyz, poscar\" ) logging.info(f\"save", "self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label == label: stored_label", "def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection =", "str, format: str = None, preserve_order: bool = False): \"\"\" pickle format: previous", "metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs", 
"from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2): return", "deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms:", "structure that contains a collection of trajectory objects <NAME> (Harvard University) 2020 \"\"\"", "print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not merge: if name in self.alltrjs: name", "line_split[0]: _count = int(line_split[1]) if _count > count: count = _count if label", "np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count = 0 for id_trj, trj in enumerate(self.alltrjs.values()):", "trj self.nframes += trj.nframes self.ntrjs += 1 return # order trj by element", "self.nframes -= trj.nframes self.ntrjs -= 1 def add_trj( self, trj, name=None, merge=False, preserve_order=False,", "def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj,", "the previous trajectories for l in alldata: if \"_\" in l: line_split =", "of trajectory objects <NAME> (Harvard University) 2020 \"\"\" from copy import deepcopy import", "all the previous trajectories for l in alldata: if \"_\" in l: line_split", "newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! 
Metadata is exactly", "- 1) def get_frame(self, idx, keys=None): n_attrs = len(self) if idx >= n_attrs:", "array[self.global_id] def include_frames(self, accept_id=None): if accept_id is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id]", "return trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label if preserve_order: count", "= np.max(self.global_id) if max_trj == (self.ntrjs - 1) and max_frame == (self.nframes -", "trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key):", "trj is not None: self.nframes -= trj.nframes self.ntrjs -= 1 def add_trj( self,", "by element order, label = species_to_order_label(trj.species) if name is None: stored_label = None", "\"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format == \"xyz\": for trj in self.alltrjs.values():", "if idx >= n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id", "= obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! 
Metadata", "trj.nframes self.ntrjs -= 1 def add_trj( self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True,", "merge: if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is None and", "1: array = np.hstack(array) else: array = np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None):", "== line_split[0]: _count = int(line_split[1]) if _count > count: count = _count if", ".trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file, load_file def", "class Trajectories: def __init__(self): self.nframes = 0 self.ntrjs = 0 self.alltrjs = {}", "trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for trj in self.alltrjs.values(): trjs.add_trj(", "trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label, alldata,", "self.per_frame_attrs = [] self.trj_id = None self.in_trj_id = None self.global_id = None def", "is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id]", "f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return self.nframes def construct_id_list(self, force_run=False):", "key, None) if attr is None: raise ValueError(f\"not all trjs has attr {attr}\")", "format: previous objects saved as pickle format \"\"\" obj = load_file( supported_formats={\"npz\": \"npz\",", "for _label, oldtrj in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and", "self.alltrjs.items()} @classmethod def from_file(cls, name: str, format: str = None, preserve_order: bool =", "== 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: 
nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs)", "add_trj( self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0:", "l.split(\"_\") else: line_split = l if label == line_split[0]: _count = int(line_split[1]) if", "def __getitem__(self, key): return self.alltrjs[key] def __iter__(self): return self def __next__(self): self._iter_index =", ") def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs", "numpy as np from .trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload", "nframes] += np.arange(nframes) count += nframes def __getitem__(self, key): return self.alltrjs[key] def __iter__(self):", "trjs has attr {attr}\") array = [] for id_trj, trj in enumerate(self.alltrjs.values()): sub_array", "trj.get_attr(key) array += [sub_array] if len(array[0].shape) <= 1: array = np.hstack(array) else: array", "self.global_id = None def __repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)} trj\" def", "f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\"", "len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for", "that contains a collection of trajectory objects <NAME> (Harvard University) 2020 \"\"\" from", "Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert", "= l.split(\"_\") else: line_split = l if label == line_split[0]: _count = int(line_split[1])", "order, label = species_to_order_label(trj.species) if name is None: stored_label = None for _label,", 
"(self.nframes - 1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id", "= self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name: str, format:", "idx >= n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id =", "\"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod", "trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self,", "in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False,", "__str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name in self.alltrjs: s", "if self.trj_id is not None and not force_run: max_trj = np.max(self.trj_id) max_frame =", "self.nframes += trj.nframes self.ntrjs += 1 return stored_label def add_trjs( self, trjs, merge=False,", "+= trj.nframes self.ntrjs += 1 return # order trj by element order, label", "None, preserve_order: bool = False): \"\"\" pickle format: previous objects saved as pickle", "import save_file, load_file def dummy_comp(trj1, trj2): return True class Trajectories: def __init__(self): self.nframes", "trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def", "_label, oldtrj in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label", "raise RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, 
preserve_order=preserve_order,", "save(self, name: str, format: str = None): if format in [\"pickle\", \"npz\"] or", "nframes] += id_trj self.in_trj_id[count : count + nframes] += np.arange(nframes) count += nframes", "idx, keys=None): n_attrs = len(self) if idx >= n_attrs: raise ValueError(f\"frame index overflow", "dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count = 0 for id_trj,", "trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs =", "= np.hstack(array) else: array = np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if accept_id", "raise RuntimeError(f\"not enough per_frame_attrs\") if not merge: if name in self.alltrjs: name =", "exactly the same. Merge to {stored_label}\" ) else: stored_label, last_label = obtain_store_label( last_label=\"NA0\",", "return s def __len__(self): return self.nframes def construct_id_list(self, force_run=False): if self.trj_id is not", "preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label", "+ nframes] += np.arange(nframes) count += nframes def __getitem__(self, key): return self.alltrjs[key] def", "for name, trj_dict in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj", "__repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)}", "-1 # find all the previous trajectories for l in alldata: if \"_\"", "break if stored_label is None: stored_label = label else: stored_label = name +", "if \"_\" in l: line_split = l.split(\"_\") else: line_split = l if label", "in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if attr is None: raise ValueError(f\"not", "label == 
line_split[0]: _count = int(line_split[1]) if _count > count: count = _count", "preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name): trj = self.alltrjs.pop(name, None) if trj", "in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name not in", "= f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name in self.alltrjs: s += f\"----{name}----\\n\"", "import numpy as np from .trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from", "self._iter_index += 1 return self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None): n_attrs =", "array = [] for id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array +=", "else: array = np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if accept_id is None:", "Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs += 1 return", "s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name in self.alltrjs: s +=", "self.global_id = np.arange(self.nframes) count = 0 for id_trj, trj in enumerate(self.alltrjs.values()): nframes =", "!= nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not merge: if name", "\"\"\" Data structure that contains a collection of trajectory objects <NAME> (Harvard University)", "in self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj", "self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None): n_attrs = len(self) if idx >=", "StopIteration self._iter_index += 1 return self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None): n_attrs", "format: str = None): if format in [\"pickle\", \"npz\"] or format is None:", "{name}\") def to_dict(self): 
return {name: trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod def", "= [] for id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array]", "instance \"\"\" trjs = Trajectories() for name, trj_dict in dictionary.items(): if not isinstance(trj_dict,", "if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not merge:", "count + nframes] += id_trj self.in_trj_id[count : count + nframes] += np.arange(nframes) count", "def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def", "== label: stored_label = _label break if stored_label is None: stored_label = label", "and trj.name not in self.alltrjs: name = trj.name elif name is None: name", "= trj self.nframes += trj.nframes self.ntrjs += 1 return # order trj by", "same. Merge to {stored_label}\" ) else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs,", "np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"!", "l if label == line_split[0]: _count = int(line_split[1]) if _count > count: count", "self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name: str, format: str = None): if", "preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs", "logging.info(f\"save as {name}\") def to_dict(self): return {name: trj.to_dict() for name, trj in self.alltrjs.items()}", "if max_trj == (self.ntrjs - 1) and max_frame == (self.nframes - 1): return", "from pickle, xyz, poscar\" ) logging.info(f\"save as {name}\") def to_dict(self): return {name: trj.to_dict()", "if name is None: stored_label = None 
for _label, oldtrj in self.alltrjs.items(): old_order,", "name, trj in self.alltrjs.items()} @classmethod def from_file(cls, name: str, format: str = None,", "old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label == label: stored_label =", "preserve_order): stored_label = label if preserve_order: count = -1 # find all the", "if label == line_split[0]: _count = int(line_split[1]) if _count > count: count =", "__init__(self): self.nframes = 0 self.ntrjs = 0 self.alltrjs = {} self._iter_index = 0", "self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count = 0 for id_trj, trj", "!= nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None,", "self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label,", "label != last_label: count += 1 last_label = label stored_label = f\"{label}_{count}\" return", "self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name not in self.alltrjs:", "getattr(trj, key, None) if attr is None: raise ValueError(f\"not all trjs has attr", "self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name: str,", "alldata, preserve_order): stored_label = label if preserve_order: count = -1 # find all", "in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label,", "in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj,", "l: line_split = l.split(\"_\") else: line_split = l if label == line_split[0]: _count", "has attr {attr}\") array = [] for id_trj, trj in 
enumerate(self.alltrjs.values()): sub_array =", "in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format {format} not supported:\" f\"", "None for _label, oldtrj in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj)", "trj in self.alltrjs.items()} @classmethod def from_file(cls, name: str, format: str = None, preserve_order:", "= None self.global_id = None def __repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)}", "last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"!", "pickle format: previous objects saved as pickle format \"\"\" obj = load_file( supported_formats={\"npz\":", "to a Trajectory instance \"\"\" trjs = Trajectories() for name, trj_dict in dictionary.items():", "== (self.nframes - 1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int)", "__next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self): raise StopIteration self._iter_index", "\"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format == \"xyz\": for trj in", "): if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs)", "return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary to a", "remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None,", "= Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label] if", "[] self.trj_id = None self.in_trj_id = None self.global_id = None def __repr__(self) 
->", "self.ntrjs += 1 return # order trj by element order, label = species_to_order_label(trj.species)", "= {} self._iter_index = 0 self.per_frame_attrs = [] self.trj_id = None self.in_trj_id =", "filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary:", "stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug(", "alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is not the same.", "line_split = l.split(\"_\") else: line_split = l if label == line_split[0]: _count =", "ValueError(f\"not all trjs has attr {attr}\") array = [] for id_trj, trj in", "save_mode: nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs)", "None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs += 1", "= self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def", "if accept_id is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id", "if metadata_compare(trj, oldtrj): logging.debug( f\"! Metadata is exactly the same. 
Merge to {stored_label}\"", "): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not", "keys=None): n_attrs = len(self) if idx >= n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\")", "NotImplementedError( f\"Output format {format} not supported:\" f\" try from pickle, xyz, poscar\" )", "contains a collection of trajectory objects <NAME> (Harvard University) 2020 \"\"\" from copy", "-> str: return f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories", "{stored_label}\" ) else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label]", "for l in alldata: if \"_\" in l: line_split = l.split(\"_\") else: line_split", "\"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for trj", "_count = int(line_split[1]) if _count > count: count = _count if label !=", "name in self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self):", "self.global_id[accept_id] def save(self, name: str, format: str = None): if format in [\"pickle\",", "all trjs has attr {attr}\") array = [] for id_trj, trj in enumerate(self.alltrjs.values()):", "1 return stored_label def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms =", "{format} not supported:\" f\" try from pickle, xyz, poscar\" ) logging.info(f\"save as {name}\")", "= trj.name elif name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes", "\"\"\" trjs = Trajectories() for name, trj_dict in dictionary.items(): if not isinstance(trj_dict, dict):", "def __len__(self): return self.nframes def construct_id_list(self, force_run=False): if self.trj_id is not None and", "+= [sub_array] if 
len(array[0].shape) <= 1: array = np.hstack(array) else: array = np.vstack(array)", "self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if", "None): if format in [\"pickle\", \"npz\"] or format is None: save_file( self.to_dict(), supported_formats={\"npz\":", "list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj, trj in", "format: str = None, preserve_order: bool = False): \"\"\" pickle format: previous objects", "metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label if", "= _label break if stored_label is None: stored_label = label else: stored_label =", "the same. Not merge. Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes", "None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def", "None: self.nframes -= trj.nframes self.ntrjs -= 1 def add_trj( self, trj, name=None, merge=False,", "self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs += 1 return stored_label def add_trjs(", "preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms:", "name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name not", "self._iter_index = 0 self.per_frame_attrs = [] self.trj_id = None self.in_trj_id = None self.global_id", "dtype=int) self.global_id = np.arange(self.nframes) count = 0 for id_trj, trj in enumerate(self.alltrjs.values()): nframes", 
"trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name): trj = self.alltrjs.pop(name,", "dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr", "not force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj == (self.ntrjs -", "for id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if attr is", "{} self._iter_index = 0 self.per_frame_attrs = [] self.trj_id = None self.in_trj_id = None", "enforced_format=format, ) elif format == \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif", "len(self) if idx >= n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx]", "to {stored_label}\" ) else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, )", "return self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None): n_attrs = len(self) if idx", "if format in [\"pickle\", \"npz\"] or format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\",", "return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()):", "n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj", "array = np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if accept_id is None: return", "cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary to a Trajectory", "Metadata is exactly the same. 
Merge to {stored_label}\" ) else: stored_label, last_label =", "return stored_label def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs)", "= label if preserve_order: count = -1 # find all the previous trajectories", "= l if label == line_split[0]: _count = int(line_split[1]) if _count > count:", "> count: count = _count if label != last_label: count += 1 last_label", "attr is None: raise ValueError(f\"not all trjs has attr {attr}\") array = []", "== \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for", "a Trajectory instance \"\"\" trjs = Trajectories() for name, trj_dict in dictionary.items(): if", "label else: stored_label = name + \"_\" + label label = name +", "return # order trj by element order, label = species_to_order_label(trj.species) if name is", "\"\"\" pickle format: previous objects saved as pickle format \"\"\" obj = load_file(", "set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not", "merge. 
Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs += 1", "copy import deepcopy import logging import numpy as np from .trajectory import Trajectory", "def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary to a Trajectory instance \"\"\"", "trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if attr is None: raise", "id_trj self.in_trj_id[count : count + nframes] += np.arange(nframes) count += nframes def __getitem__(self,", "name = trj.name elif name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj", "stored_label is None: stored_label = label else: stored_label = name + \"_\" +", "species_to_order_label(trj.species) if name is None: stored_label = None for _label, oldtrj in self.alltrjs.items():", "def save(self, name: str, format: str = None): if format in [\"pickle\", \"npz\"]", "self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count =", "and not force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj == (self.ntrjs", "for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format {format} not", "len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj,", "count + nframes] += np.arange(nframes) count += nframes def __getitem__(self, key): return self.alltrjs[key]", "0 self.ntrjs = 0 self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs = []", "self.ntrjs = 0 self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs = [] self.trj_id", "= None def __repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self):", "stored_label not in self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label) 
self.alltrjs[stored_label] = newtrj", "self def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self): raise", "if metadata_compare(trj, oldtrj) and old_label == label: stored_label = _label break if stored_label", "same. Not merge. Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs", "= len(self) if idx >= n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\") trj_id =", "trjs = self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs", "count: count = _count if label != last_label: count += 1 last_label =", "from copy import deepcopy import logging import numpy as np from .trajectory import", "raise ValueError(f\"not all trjs has attr {attr}\") array = [] for id_trj, trj", "nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise", "+ label label = name + \"_\" + label if stored_label not in", "= deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) !=", "trj2): return True class Trajectories: def __init__(self): self.nframes = 0 self.ntrjs = 0", "Trajectory() logging.debug( f\"! Metadata is not the same. Not merge. 
Buil {stored_label}\" )", "trajectory objects <NAME> (Harvard University) 2020 \"\"\" from copy import deepcopy import logging", "= list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list()", "line_split = l if label == line_split[0]: _count = int(line_split[1]) if _count >", "isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\"", "obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label if preserve_order: count = -1 #", "self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return self.nframes", "self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! Metadata is exactly the same. Merge to", "+= 1 return stored_label def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms", "array += [sub_array] if len(array[0].shape) <= 1: array = np.hstack(array) else: array =", "trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self,", "-= 1 def add_trj( self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if", "np.arange(nframes) count += nframes def __getitem__(self, key): return self.alltrjs[key] def __iter__(self): return self", "= trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def", "key): return self.alltrjs[key] def __iter__(self): return self def __next__(self): self._iter_index = getattr(self, \"_iter_index\",", "np.max(self.global_id) if max_trj == (self.ntrjs - 1) and max_frame == (self.nframes - 
1):", "as {name}\") def to_dict(self): return {name: trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod", "= False): \"\"\" pickle format: previous objects saved as pickle format \"\"\" obj", "nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge,", "None def __repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s", "or format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, )", "self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is not the same. Not merge. Buil", "len(self): raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index - 1) def get_frame(self, idx,", "self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id,", "previous objects saved as pickle format \"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\":", "is None: stored_label = None for _label, oldtrj in self.alltrjs.items(): old_order, old_label =", "1) and max_frame == (self.nframes - 1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id", "+= 1 return self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None): n_attrs = len(self)", "format \"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if", "if len(array[0].shape) <= 1: array = np.hstack(array) else: array = np.vstack(array) return array[self.global_id]", "\"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format {format}", "metadata_compare(trj, oldtrj) and old_label == label: stored_label = _label break if stored_label is", 
"list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for", "None self.global_id = None def __repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)} trj\"", "keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj,", "max_frame == (self.nframes - 1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes,", "nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough per_frame_attrs\") if not merge: if name in", "{name: trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod def from_file(cls, name: str, format:", "element order, label = species_to_order_label(trj.species) if name is None: stored_label = None for", "convert dictionary to a Trajectory instance \"\"\" trjs = Trajectories() for name, trj_dict", "self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name: str, format: str", "enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count : count + nframes] += id_trj self.in_trj_id[count :", "\"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return obj return cls.from_dict(dict(obj)) @staticmethod def", "RuntimeError(f\"not enough per_frame_attrs\") if not merge: if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\"", "for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def", "@staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary to a Trajectory instance", "self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, 
metadata_compare=dummy_comp): trjs = Trajectories() for trj in", "preserve_order: bool = False): \"\"\" pickle format: previous objects saved as pickle format", ") self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs += 1 return stored_label def", "obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories):", "self.alltrjs.pop(name, None) if trj is not None: self.nframes -= trj.nframes self.ntrjs -= 1", "__getitem__(self, key): return self.alltrjs[key] def __iter__(self): return self def __next__(self): self._iter_index = getattr(self,", "is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs +=", "= f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs += 1 return #", "= _count if label != last_label: count += 1 last_label = label stored_label", "+= id_trj self.in_trj_id[count : count + nframes] += np.arange(nframes) count += nframes def", "metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise", "trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label if preserve_order: count =", ") else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] =", "# find all the previous trajectories for l in alldata: if \"_\" in", "name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label", "last_label: count += 1 last_label = label stored_label = f\"{label}_{count}\" return stored_label, last_label", "n_attrs = len(self) if idx >= n_attrs: raise ValueError(f\"frame 
index overflow {n_attrs}\") trj_id", "def obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label if preserve_order: count = -1", "if self._iter_index >= len(self): raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index - 1)", "merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self,", "per_frame_attrs\") if not merge: if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name", "def add_trj( self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) ==", "logging import numpy as np from .trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label", "import deepcopy import logging import numpy as np from .trajectory import Trajectory from", "trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return", "format == \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output", "= None for _label, oldtrj in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj,", "l in alldata: if \"_\" in l: line_split = l.split(\"_\") else: line_split =", "= np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count = 0 for id_trj, trj in", "self.ntrjs -= 1 def add_trj( self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ):", "= np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if accept_id is None: return self.construct_id_list()", "from .trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file, load_file", "1 def add_trj( self, 
trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs)", "= self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! Metadata is exactly the same. Merge", "for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for trj in", "\"\"\" from copy import deepcopy import logging import numpy as np from .trajectory", "self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if attr", "if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs) intersection", "name + \"_\" + label label = name + \"_\" + label if", "with {len(self.alltrjs)} trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for", "= np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count = 0", "oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! 
Metadata is exactly the same.", "is None: stored_label = label else: stored_label = name + \"_\" + label", "if not merge: if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is", "trj_dict in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict)", "return trjs def pop_trj(self, name): trj = self.alltrjs.pop(name, None) if trj is not", "return True class Trajectories: def __init__(self): self.nframes = 0 self.ntrjs = 0 self.alltrjs", "trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else:", "else: stored_label = name + \"_\" + label label = name + \"_\"", "self.ntrjs += 1 return stored_label def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ):", "count = 0 for id_trj, trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count :", "-= trj.nframes self.ntrjs -= 1 def add_trj( self, trj, name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp,", "merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del", "previous trajectories for l in alldata: if \"_\" in l: line_split = l.split(\"_\")", "trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format {format} not supported:\"", "= newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! 
Metadata is", "trj.nframes self.trj_id[count : count + nframes] += id_trj self.in_trj_id[count : count + nframes]", "dict, merge=False, preserve_order=False): \"\"\" convert dictionary to a Trajectory instance \"\"\" trjs =", "save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs =", "with {len(self)} frames\\n\" for name in self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\"", ": count + nframes] += id_trj self.in_trj_id[count : count + nframes] += np.arange(nframes)", "np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if accept_id is None: return self.construct_id_list() self.trj_id", "in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format == \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\",", "pop_trj(self, name): trj = self.alltrjs.pop(name, None) if trj is not None: self.nframes -=", "trj.nframes self.ntrjs += 1 return stored_label def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp,", "raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None):", "elif name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes", "= 0 self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs = [] self.trj_id =", "1 return self.get_frame(self._iter_index - 1) def get_frame(self, idx, keys=None): n_attrs = len(self) if", "self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) elif format == \"xyz\": for", "return self.nframes def construct_id_list(self, force_run=False): if self.trj_id is not None and not force_run:", "Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) 
return", "newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj):", "trj = self.alltrjs.pop(name, None) if trj is not None: self.nframes -= trj.nframes self.ntrjs", "name + \"_\" + label if stored_label not in self.alltrjs: newtrj = Trajectory()", "label, alldata, preserve_order): stored_label = label if preserve_order: count = -1 # find", "merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif", "def __repr__(self) -> str: return f\"Trajectories with {len(self.alltrjs)} trj\" def __str__(self): s =", "return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count", "Merge to {stored_label}\" ) else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True,", "to_dict(self): return {name: trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod def from_file(cls, name:", "merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name): trj = self.alltrjs.pop(name, None) if", "for id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array] if len(array[0].shape)", "None: stored_label = label else: stored_label = name + \"_\" + label label", "preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode:", "__iter__(self): return self def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index >=", "Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2):", "in enumerate(self.alltrjs.values()): 
nframes = trj.nframes self.trj_id[count : count + nframes] += id_trj self.in_trj_id[count", "Not merge. Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes += trj.nframes self.ntrjs +=", "= 0 self.per_frame_attrs = [] self.trj_id = None self.in_trj_id = None self.global_id =", "np from .trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file,", "from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2): return True class Trajectories: def", "oldtrj) and old_label == label: stored_label = _label break if stored_label is None:", "trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj,", "+= trj.nframes self.ntrjs += 1 return stored_label def add_trjs( self, trjs, merge=False, preserve_order=False,", "find all the previous trajectories for l in alldata: if \"_\" in l:", "1) def get_frame(self, idx, keys=None): n_attrs = len(self) if idx >= n_attrs: raise", "if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name", "stored_label def add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection", "self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for trj", "name is None: stored_label = None for _label, oldtrj in self.alltrjs.items(): old_order, old_label", "0 self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs = [] self.trj_id = None", "key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None) if", "in self.alltrjs.items()} @classmethod def from_file(cls, name: str, format: str = None, preserve_order: bool", "self.in_trj_id = 
self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name: str, format: str =", "name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs", "= Trajectory() logging.debug( f\"! Metadata is not the same. Not merge. Buil {stored_label}\"", "nframes = trj.nframes self.trj_id[count : count + nframes] += id_trj self.in_trj_id[count : count", "attr {attr}\") array = [] for id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key)", "include_frames(self, accept_id=None): if accept_id is None: return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id =", "= self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name = list(self.alltrjs.keys())[trj_id] return dict(name=trj_name,", "def __str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name in self.alltrjs:", "self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection)", "logging.debug( f\"! Metadata is not the same. Not merge. 
Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj,", "count = _count if label != last_label: count += 1 last_label = label", "Trajectory instance \"\"\" trjs = Trajectories() for name, trj_dict in dictionary.items(): if not", "per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, )", "name: str, format: str = None): if format in [\"pickle\", \"npz\"] or format", "if preserve_order: count = -1 # find all the previous trajectories for l", "= np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug(", "get_attrs(self, key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr = getattr(trj, key, None)", "label: stored_label = _label break if stored_label is None: stored_label = label else:", "stored_label = label if preserve_order: count = -1 # find all the previous", "= list(self.alltrjs.keys())[trj_id] return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj, trj", "nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not enough", ") logging.info(f\"save as {name}\") def to_dict(self): return {name: trj.to_dict() for name, trj in", "[sub_array] if len(array[0].shape) <= 1: array = np.hstack(array) else: array = np.vstack(array) return", "RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare,", "np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id = np.arange(self.nframes) count = 0 for", "f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return 
self.nframes def construct_id_list(self, force_run=False): if self.trj_id is", "elif name is None and trj.name not in self.alltrjs: name = trj.name elif", "s def __len__(self): return self.nframes def construct_id_list(self, force_run=False): if self.trj_id is not None", "\"\"\" convert dictionary to a Trajectory instance \"\"\" trjs = Trajectories() for name,", "= trj.get_attr(key) array += [sub_array] if len(array[0].shape) <= 1: array = np.hstack(array) else:", "= None): if format in [\"pickle\", \"npz\"] or format is None: save_file( self.to_dict(),", "try from pickle, xyz, poscar\" ) logging.info(f\"save as {name}\") def to_dict(self): return {name:", "label label = name + \"_\" + label if stored_label not in self.alltrjs:", "metadata_compare(trj, oldtrj): logging.debug( f\"! Metadata is exactly the same. Merge to {stored_label}\" )", "f\"! Metadata is exactly the same. Merge to {stored_label}\" ) else: stored_label, last_label", "load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj, Trajectories): return obj", "enough per_frame_attrs\") if not merge: if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif", "order=order) self.nframes += trj.nframes self.ntrjs += 1 return stored_label def add_trjs( self, trjs,", "- 1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id = np.zeros(self.nframes, dtype=int) self.global_id =", "self.nframes += trj.nframes self.ntrjs += 1 return # order trj by element order,", "_count if label != last_label: count += 1 last_label = label stored_label =", "self.in_trj_id = None self.global_id = None def __repr__(self) -> str: return f\"Trajectories with", "import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1,", "label if preserve_order: count = -1 # find all the previous trajectories for", "xyz, poscar\" ) 
logging.info(f\"save as {name}\") def to_dict(self): return {name: trj.to_dict() for name,", "label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is not the", "+= f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return self.nframes def construct_id_list(self,", ") return trjs def obtain_store_label(last_label, label, alldata, preserve_order): stored_label = label if preserve_order:", "self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format {format} not supported:\" f\" try", "max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id) if max_trj == (self.ntrjs - 1) and", "def __iter__(self): return self def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index", "len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs) print(trj.per_frame_attrs) raise RuntimeError(f\"not enough", "None) if attr is None: raise ValueError(f\"not all trjs has attr {attr}\") array", "0) if self._iter_index >= len(self): raise StopIteration self._iter_index += 1 return self.get_frame(self._iter_index -", "is None: raise ValueError(f\"not all trjs has attr {attr}\") array = [] for", "trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection)", "name is None and trj.name not in self.alltrjs: name = trj.name elif name", "self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj =", "not in self.alltrjs: newtrj = Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] = newtrj else:", "obj return cls.from_dict(dict(obj)) @staticmethod def 
from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary to", "obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is", "import logging import numpy as np from .trajectory import Trajectory from thyme.utils.atomic_symbols import", "= self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs =", "trj in enumerate(self.alltrjs.values()): nframes = trj.nframes self.trj_id[count : count + nframes] += id_trj", "= getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self): raise StopIteration self._iter_index += 1", "getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self): raise StopIteration self._iter_index += 1 return", "{attr}\") array = [] for id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array", "bool = False): \"\"\" pickle format: previous objects saved as pickle format \"\"\"", "not in self.alltrjs: name = trj.name elif name is None: name = f\"{len(self.alltrjs)}\"", "objects saved as pickle format \"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"},", "self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self, name:", "stored_label = label else: stored_label = name + \"_\" + label label =", "frames\\n\" for name in self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s", "= trj.nframes self.trj_id[count : count + nframes] += id_trj self.in_trj_id[count : count +", "return self def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self):", "f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name not in self.alltrjs: name = trj.name", "oldtrj in self.alltrjs.items(): 
old_order, old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label ==", "= [] self.trj_id = None self.in_trj_id = None self.global_id = None def __repr__(self)", "name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name): trj = self.alltrjs.pop(name, None)", "self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs = [] self.trj_id = None self.in_trj_id", "if len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj in trjs.alltrjs.values(): self.add_trj(", "None) if trj is not None: self.nframes -= trj.nframes self.ntrjs -= 1 def", "Metadata is not the same. Not merge. Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order)", "self.add_trj( trj, name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs", "!= last_label: count += 1 last_label = label stored_label = f\"{label}_{count}\" return stored_label,", ") elif format == \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) elif format", "name, trj_dict in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj =", "self.alltrjs[key] def __iter__(self): return self def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if", "= -1 # find all the previous trajectories for l in alldata: if", "isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list()", "is not the same. Not merge. 
Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes", "format) elif format == \"poscar\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\", format) else: raise", "[\"pickle\", \"npz\"] or format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name,", "def construct_id_list(self, force_run=False): if self.trj_id is not None and not force_run: max_trj =", "construct_id_list(self, force_run=False): if self.trj_id is not None and not force_run: max_trj = np.max(self.trj_id)", "return {name: trj.to_dict() for name, trj in self.alltrjs.items()} @classmethod def from_file(cls, name: str,", "trjs = Trajectories() for name, trj_dict in dictionary.items(): if not isinstance(trj_dict, dict): trj_dict", "else: stored_label, last_label = obtain_store_label( last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory()", "is exactly the same. 
Merge to {stored_label}\" ) else: stored_label, last_label = obtain_store_label(", "def __next__(self): self._iter_index = getattr(self, \"_iter_index\", 0) if self._iter_index >= len(self): raise StopIteration", "self.global_id = self.global_id[accept_id] def save(self, name: str, format: str = None): if format", "+ \"_\" + label if stored_label not in self.alltrjs: newtrj = Trajectory() newtrj.name", "return obj return cls.from_dict(dict(obj)) @staticmethod def from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary", "dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return", "old_label = species_to_order_label(oldtrj.species) if metadata_compare(trj, oldtrj) and old_label == label: stored_label = _label", "del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for", "trj.save(f\"{trj.name}_{name}\", format) else: raise NotImplementedError( f\"Output format {format} not supported:\" f\" try from", "in alldata: if \"_\" in l: line_split = l.split(\"_\") else: line_split = l", "**trj.get_frame(frame_id, keys=keys)) def get_attrs(self, key): self.construct_id_list() for id_trj, trj in enumerate(self.alltrjs.values()): attr =", "self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories()", "[] for id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array] if", "self.alltrjs: name = trj.name elif name is None: name = f\"{len(self.alltrjs)}\" self.alltrjs[name] =", "species_to_order_label from thyme.utils.savenload import save_file, load_file def dummy_comp(trj1, trj2): return True class Trajectories:", "= np.arange(self.nframes) count = 0 for id_trj, trj in enumerate(self.alltrjs.values()): nframes = 
trj.nframes", "name=None, merge=False, preserve_order=False, metadata_compare=dummy_comp, save_mode=True, ): if len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs)", "label = species_to_order_label(trj.species) if name is None: stored_label = None for _label, oldtrj", "if attr is None: raise ValueError(f\"not all trjs has attr {attr}\") array =", "force_run=False): if self.trj_id is not None and not force_run: max_trj = np.max(self.trj_id) max_frame", "dictionary.items(): if not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name,", "True class Trajectories: def __init__(self): self.nframes = 0 self.ntrjs = 0 self.alltrjs =", "{len(self)} frames\\n\" for name in self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return", "None: stored_label = None for _label, oldtrj in self.alltrjs.items(): old_order, old_label = species_to_order_label(oldtrj.species)", "Trajectory() newtrj.name = np.copy(stored_label) self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj,", "trj\" def __str__(self): s = f\"{len(self.alltrjs)} trajectories with {len(self)} frames\\n\" for name in", "- 1) and max_frame == (self.nframes - 1): return self.trj_id = np.zeros(self.nframes, dtype=int)", "in [\"pickle\", \"npz\"] or format is None: save_file( self.to_dict(), supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"},", "as np from .trajectory import Trajectory from thyme.utils.atomic_symbols import species_to_order_label from thyme.utils.savenload import", "\"_\" + label label = name + \"_\" + label if stored_label not", ") self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is not the same. 
Not merge.", "Data structure that contains a collection of trajectory objects <NAME> (Harvard University) 2020", "save_file, load_file def dummy_comp(trj1, trj2): return True class Trajectories: def __init__(self): self.nframes =", "= f\"{name}_{len(self.alltrjs)}\" elif name is None and trj.name not in self.alltrjs: name =", "if label != last_label: count += 1 last_label = label stored_label = f\"{label}_{count}\"", "from_dict(dictionary: dict, merge=False, preserve_order=False): \"\"\" convert dictionary to a Trajectory instance \"\"\" trjs", "self.trj_id is not None and not force_run: max_trj = np.max(self.trj_id) max_frame = np.max(self.global_id)", "not supported:\" f\" try from pickle, xyz, poscar\" ) logging.info(f\"save as {name}\") def", "= trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = Trajectories() for trj in self.alltrjs.values():", "pickle, xyz, poscar\" ) logging.info(f\"save as {name}\") def to_dict(self): return {name: trj.to_dict() for", "\"_\" + label if stored_label not in self.alltrjs: newtrj = Trajectory() newtrj.name =", "= None self.in_trj_id = None self.global_id = None def __repr__(self) -> str: return", "self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs += 1 return # order trj", "f\"! Metadata is not the same. Not merge. 
Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False,", "trjs def pop_trj(self, name): trj = self.alltrjs.pop(name, None) if trj is not None:", "def dummy_comp(trj1, trj2): return True class Trajectories: def __init__(self): self.nframes = 0 self.ntrjs", "name=None, merge=merge, preserve_order=preserve_order, metadata_compare=metadata_compare, save_mode=False, ) def merge(self, preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge()", "0 self.per_frame_attrs = [] self.trj_id = None self.in_trj_id = None self.global_id = None", "len(self.alltrjs) == 0: self.per_frame_attrs = deepcopy(trj.per_frame_attrs) elif save_mode: nterms = len(self.per_frame_attrs) intersection =", "add_trjs( self, trjs, merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs)", "not merge: if name in self.alltrjs: name = f\"{name}_{len(self.alltrjs)}\" elif name is None", "not the same. Not merge. Buil {stored_label}\" ) self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order) self.nframes +=", "id_trj, trj in enumerate(self.alltrjs.values()): sub_array = trj.get_attr(key) array += [sub_array] if len(array[0].shape) <=", ": count + nframes] += np.arange(nframes) count += nframes def __getitem__(self, key): return", "elif save_mode: nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs) if len(intersection) != nterms: print(self.per_frame_attrs)", "logging.debug( f\"! Metadata is exactly the same. 
Merge to {stored_label}\" ) else: stored_label,", "Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order) trjs.construct_id_list() return trjs def pop_trj(self, name): trj =", "= Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, )", "merge=False, preserve_order=False, metadata_compare=dummy_comp, ): nterms = len(self.per_frame_attrs) intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) !=", "merge=False, preserve_order=False): \"\"\" convert dictionary to a Trajectory instance \"\"\" trjs = Trajectories()", "in self.alltrjs: s += f\"----{name}----\\n\" s += f\"{self.alltrjs[name]}\\n\" return s def __len__(self): return", "return self.construct_id_list() self.trj_id = self.trj_id[accept_id] self.in_trj_id = self.in_trj_id[accept_id] self.global_id = self.global_id[accept_id] def save(self,", "if not isinstance(trj_dict, dict): trj_dict = trj_dict.item() trj = Trajectory.from_dict(trj_dict) trjs.add_trj(trj, name=name, merge=merge,", "and max_frame == (self.nframes - 1): return self.trj_id = np.zeros(self.nframes, dtype=int) self.in_trj_id =", "+= nframes def __getitem__(self, key): return self.alltrjs[key] def __iter__(self): return self def __next__(self):", "<= 1: array = np.hstack(array) else: array = np.vstack(array) return array[self.global_id] def include_frames(self,", "= 0 self.ntrjs = 0 self.alltrjs = {} self._iter_index = 0 self.per_frame_attrs =", "name = f\"{len(self.alltrjs)}\" self.alltrjs[name] = trj self.nframes += trj.nframes self.ntrjs += 1 return", "\"\"\" obj = load_file( supported_formats={\"npz\": \"npz\", \"pickle\": \"pickle\"}, filename=name, enforced_format=format, ) if isinstance(obj,", "self.nframes def construct_id_list(self, force_run=False): if self.trj_id is not None and not force_run: max_trj", "attr = getattr(trj, key, None) if attr is 
None: raise ValueError(f\"not all trjs", "+= np.arange(nframes) count += nframes def __getitem__(self, key): return self.alltrjs[key] def __iter__(self): return", "last_label=\"NA0\", label=label, alldata=self.alltrjs, preserve_order=True, ) self.alltrjs[stored_label] = Trajectory() logging.debug( f\"! Metadata is not", "= species_to_order_label(trj.species) if name is None: stored_label = None for _label, oldtrj in", "else: raise NotImplementedError( f\"Output format {format} not supported:\" f\" try from pickle, xyz,", "= set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj in", "deepcopy import logging import numpy as np from .trajectory import Trajectory from thyme.utils.atomic_symbols", "intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs) if len(intersection) != nterms: raise RuntimeError(f\"not enough per_frame_attrs\") for trj", ">= n_attrs: raise ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx]", "trjs = Trajectories() for trj in self.alltrjs.values(): trjs.add_trj( trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare,", "raise ValueError(f\"frame index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj =", "array = np.hstack(array) else: array = np.vstack(array) return array[self.global_id] def include_frames(self, accept_id=None): if", "f\" try from pickle, xyz, poscar\" ) logging.info(f\"save as {name}\") def to_dict(self): return", "stored_label = _label break if stored_label is None: stored_label = label else: stored_label", "self.alltrjs[stored_label] = newtrj else: oldtrj = self.alltrjs[stored_label] if metadata_compare(trj, oldtrj): logging.debug( f\"! 
Metadata", "trj, name=None, merge=True, preserve_order=preserve_order, metadata_compare=metadata_compare, ) return trjs def obtain_store_label(last_label, label, alldata, preserve_order):", "def __init__(self): self.nframes = 0 self.ntrjs = 0 self.alltrjs = {} self._iter_index =", "return array[self.global_id] def include_frames(self, accept_id=None): if accept_id is None: return self.construct_id_list() self.trj_id =", "index overflow {n_attrs}\") trj_id = self.trj_id[idx] frame_id = self.in_trj_id[idx] trj = list(self.alltrjs.values())[trj_id] trj_name", "for name, trj in self.alltrjs.items()} @classmethod def from_file(cls, name: str, format: str =", "= int(line_split[1]) if _count > count: count = _count if label != last_label:", "format {format} not supported:\" f\" try from pickle, xyz, poscar\" ) logging.info(f\"save as", "preserve_order=False, metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False,", "name: str, format: str = None, preserve_order: bool = False): \"\"\" pickle format:", "metadata_compare=dummy_comp): trjs = self.remerge() del self.alltrjs self.alltrjs = trjs.alltrjs def remerge(self, preserve_order=False, metadata_compare=dummy_comp):", "\"pickle\"}, filename=name, enforced_format=format, ) elif format == \"xyz\": for trj in self.alltrjs.values(): trj.save(f\"{trj.name}_{name}\"," ]
[ "se ele é bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40)", "um programa que leia um ano qualquer e mostre se ele é bissexto.", "= int(input(\"Insira um ano: \")) if ano % 4 == 0 and ano", "* 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira um ano: \"))", "ano % 4 == 0 and ano % 100 == 0 or ano", "== 0 and ano % 100 == 0 or ano % 400 ==", "um ano qualquer e mostre se ele é bissexto. \"\"\" print(\"-\" * 40)", "4 == 0 and ano % 100 == 0 or ano % 400", "or ano % 400 == 0: print(f\"O ano {ano} é BISSEXTO !!!\") else:", "ano % 400 == 0: print(f\"O ano {ano} é BISSEXTO !!!\") else: print(f'Este", "100 == 0 or ano % 400 == 0: print(f\"O ano {ano} é", "Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira um ano: \")) if ano %", "Exercício Python 32: Faça um programa que leia um ano qualquer e mostre", "ano qualquer e mostre se ele é bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano", "print(\"-\" * 40) ano = int(input(\"Insira um ano: \")) if ano % 4", "mostre se ele é bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" *", "if ano % 4 == 0 and ano % 100 == 0 or", "e mostre se ele é bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\"", "print(f\"O ano {ano} é BISSEXTO !!!\") else: print(f'Este {ano} não é BISSEXTO !!!')", "\"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira um", "print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira um ano: \")) if ano", "\"\"\" Exercício Python 32: Faça um programa que leia um ano qualquer e", "programa que leia um ano qualquer e mostre se ele é bissexto. \"\"\"", "Python 32: Faça um programa que leia um ano qualquer e mostre se", "0 or ano % 400 == 0: print(f\"O ano {ano} é BISSEXTO !!!\")", "ele é bissexto. 
\"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano", "== 0 or ano % 400 == 0: print(f\"O ano {ano} é BISSEXTO", "40) ano = int(input(\"Insira um ano: \")) if ano % 4 == 0", "que leia um ano qualquer e mostre se ele é bissexto. \"\"\" print(\"-\"", "ano: \")) if ano % 4 == 0 and ano % 100 ==", "% 4 == 0 and ano % 100 == 0 or ano %", "% 100 == 0 or ano % 400 == 0: print(f\"O ano {ano}", "and ano % 100 == 0 or ano % 400 == 0: print(f\"O", "% 400 == 0: print(f\"O ano {ano} é BISSEXTO !!!\") else: print(f'Este {ano}", "* 40) ano = int(input(\"Insira um ano: \")) if ano % 4 ==", "40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira um ano: \")) if", "qualquer e mostre se ele é bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}')", "int(input(\"Insira um ano: \")) if ano % 4 == 0 and ano %", "print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira um ano:", "um ano: \")) if ano % 4 == 0 and ano % 100", "leia um ano qualquer e mostre se ele é bissexto. \"\"\" print(\"-\" *", "0 and ano % 100 == 0 or ano % 400 == 0:", "== 0: print(f\"O ano {ano} é BISSEXTO !!!\") else: print(f'Este {ano} não é", "ano = int(input(\"Insira um ano: \")) if ano % 4 == 0 and", "Faça um programa que leia um ano qualquer e mostre se ele é", "bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano = int(input(\"Insira", "400 == 0: print(f\"O ano {ano} é BISSEXTO !!!\") else: print(f'Este {ano} não", "ano % 100 == 0 or ano % 400 == 0: print(f\"O ano", "32: Faça um programa que leia um ano qualquer e mostre se ele", "0: print(f\"O ano {ano} é BISSEXTO !!!\") else: print(f'Este {ano} não é BISSEXTO", "é bissexto. \"\"\" print(\"-\" * 40) print(f'{\"Ano Bissexto\":^40}') print(\"-\" * 40) ano =", "\")) if ano % 4 == 0 and ano % 100 == 0", "<gh_stars>0 \"\"\" Exercício Python 32: Faça um programa que leia um ano qualquer" ]
[ "# do we need this? arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y',", "chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr =", "cast from mapshader.transforms import orient_array from mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster", "squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr = orient_array(arr) arr = flip_coords(arr, dim='y')", "NetCDF File', file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file =", "arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor':", "os import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file", "= path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout) if not parsed.o:", "need this? arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x':", "def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr,", "mapshader.transforms import cast from mapshader.transforms import orient_array from mapshader.transforms import flip_coords from mapshader.transforms", "arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue':", "= flip_coords(arr, dim='y') # do we need this? 
arr = reproject_raster(arr, epsg=3857) dataset", "= xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = orient_array(arr) arr = flip_coords(arr, dim='y')", "path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i))", "== '__main__': import sys from argparse import ArgumentParser from os import path parser", "from mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512, 512),", "import cast from mapshader.transforms import orient_array from mapshader.transforms import flip_coords from mapshader.transforms import", "arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def", "'_FillValue': fill_value}}) if __name__ == '__main__': import sys from argparse import ArgumentParser from", "{'dtype': 'int16', '_FillValue': fill_value}}) if __name__ == '__main__': import sys from argparse import", "xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = orient_array(arr) arr = flip_coords(arr, dim='y') #", "orient_array(arr) arr = flip_coords(arr, dim='y') # do we need this? 
arr = reproject_raster(arr,", "ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from", "mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data',", "512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = orient_array(arr)", "from mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr", "dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__ == '__main__':", "mapshader.transforms import squeeze from mapshader.transforms import cast from mapshader.transforms import orient_array from mapshader.transforms", "mapshader.transforms import orient_array from mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster def run_float(input_file,", "name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = orient_array(arr) arr", "arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y':", "from mapshader.transforms import cast from mapshader.transforms import orient_array from mapshader.transforms import flip_coords from", "import sys from argparse import ArgumentParser from os import path parser = ArgumentParser()", "= xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file,", "0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file)", 
"'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16',", "= reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']})", "reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs", "reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr", "arr = squeeze(arr, 'band') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do", "mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr =", "arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr =", "= cast(arr, dtype='float64') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we", "flip_coords from mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999):", "if __name__ == '__main__': import sys from argparse import ArgumentParser from os import", "'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__", "from TIFF to NetCDF File', file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif', '.nc')", "= orient_array(arr) arr = flip_coords(arr, dim='y') # do we need this? 
arr =", "= parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout)", "= dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file,", "dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512,", "arr = squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr = orient_array(arr) arr =", "arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = orient_array(arr) arr = flip_coords(arr,", "= xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr = orient_array(arr)", "file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if", "arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__ ==", "coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1,", "parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF", "import flip_coords from mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1,", "= ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file}", "output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 
'band')", "arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue':", "parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to", "encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__ == '__main__': import sys from argparse", "fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr", "'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr =", "output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) else: run_int(input_file, output_file) print(f'Conversion Complete: {output_file}',", "this? arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'],", "orient_array from mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster def run_float(input_file, output_file, chunks=(512,", "'.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) else: run_int(input_file, output_file) print(f'Conversion", "output_file, chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr", "{'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999):", "input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) else: run_int(input_file, output_file)", "import xarray as xr from mapshader.transforms import squeeze from mapshader.transforms import cast from", 
"print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout) if not parsed.o: output_file =", "'int16', '_FillValue': fill_value}}) if __name__ == '__main__': import sys from argparse import ArgumentParser", "dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512),", "xarray as xr from mapshader.transforms import squeeze from mapshader.transforms import cast from mapshader.transforms", "import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file =", "'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}})", "as xr from mapshader.transforms import squeeze from mapshader.transforms import cast from mapshader.transforms import", "xr from mapshader.transforms import squeeze from mapshader.transforms import cast from mapshader.transforms import orient_array", "squeeze(arr, 'band') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we need", "'band') arr = cast(arr, dtype='float64') arr = orient_array(arr) arr = flip_coords(arr, dim='y') #", "def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr =", "parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF", "'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr", "(['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype':", "File', 
file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o))", "'__main__': import sys from argparse import ArgumentParser from os import path parser =", "name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = cast(arr,", "if not parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f:", "dtype='float64') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we need this?", "-9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr =", "arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we need this? arr", "flip_coords(arr, dim='y') # do we need this? arr = reproject_raster(arr, epsg=3857) dataset =", "= squeeze(arr, 'band') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we", "__name__ == '__main__': import sys from argparse import ArgumentParser from os import path", "from os import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args()", "{input_file} from TIFF to NetCDF File', file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif',", "import squeeze from mapshader.transforms import cast from mapshader.transforms import orient_array from mapshader.transforms import", "= squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr = orient_array(arr) arr = flip_coords(arr,", "argparse import ArgumentParser from os import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f')", "fill_value}}) if __name__ == '__main__': import sys from argparse import ArgumentParser from os", "'band') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we need this?", "dim='y') # do we need this? 
arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name:", "dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__ == '__main__': import sys from", "dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file,", "xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data':", "arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if", "TIFF to NetCDF File', file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif', '.nc') else:", "arr = flip_coords(arr, dim='y') # do we need this? arr = reproject_raster(arr, epsg=3857)", "chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr", "else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) else: run_int(input_file, output_file) print(f'Conversion Complete:", "import reproject_raster def run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file)", "'_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr", "run_float(input_file, output_file, chunks=(512, 512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr,", "parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout) if", "= path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) 
else: run_int(input_file, output_file) print(f'Conversion Complete: {output_file}', file=sys.stdout)", "sys from argparse import ArgumentParser from os import path parser = ArgumentParser() parser.add_argument('-i')", "parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF File',", "import orient_array from mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster def run_float(input_file, output_file,", "= input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) else: run_int(input_file,", "scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = cast(arr, dtype='float64')", "output_file = input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file) else:", "from mapshader.transforms import orient_array from mapshader.transforms import flip_coords from mapshader.transforms import reproject_raster def", "arr = cast(arr, dtype='float64') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do", "fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = orient_array(arr) arr =", "cast(arr, dtype='float64') arr = orient_array(arr) arr = flip_coords(arr, dim='y') # do we need", "import ArgumentParser from os import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed", "512), name='data', scale_factor=0.1, fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr =", "encoding={'data': {'dtype': 'int16', 'scale_factor': 0.1, '_FillValue': -9999}}) def run_int(input_file, output_file, chunks=(512, 512), name='data',", "not parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file = 
path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file,", "input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout) if not", "parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file = path.abspath(path.expanduser(parsed.o)) if parsed.f: run_float(input_file, output_file)", "parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed = parser.parse_args() input_file = path.abspath(path.expanduser(parsed.i)) print(f'Converting", "run_int(input_file, output_file, chunks=(512, 512), name='data', fill_value=-9999): arr = xr.open_rasterio(input_file) arr = squeeze(arr, 'band')", "epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs =", "ArgumentParser from os import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o') parser.add_argument('-f') parsed =", "dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}, coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name)", "to NetCDF File', file=sys.stdout) if not parsed.o: output_file = input_file.replace('.tif', '.nc') else: output_file", "coords={'x': arr.coords['x'], 'y': arr.coords['y']}) dataset.attrs = dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}})", "from mapshader.transforms import squeeze from mapshader.transforms import cast from mapshader.transforms import orient_array from", "xr.open_rasterio(input_file) arr = squeeze(arr, 'band') arr = cast(arr, dtype='float64') arr = orient_array(arr) arr", "dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__ == '__main__': import sys", "do we need this? 
arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'],", "= dict(name=name) dataset.to_netcdf(output_file, encoding={'data': {'dtype': 'int16', '_FillValue': fill_value}}) if __name__ == '__main__': import", "squeeze from mapshader.transforms import cast from mapshader.transforms import orient_array from mapshader.transforms import flip_coords", "from argparse import ArgumentParser from os import path parser = ArgumentParser() parser.add_argument('-i') parser.add_argument('-o')", "path.abspath(path.expanduser(parsed.i)) print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout) if not parsed.o: output_file", "we need this? arr = reproject_raster(arr, epsg=3857) dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))}," ]
[ "imagem e máscaras, e retorna array de pixel como atributo n = img.shape[dim_prof]", "= img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4 =", "# -*- encoding: utf-8 -*- def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[],", "-*- def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as", "mask_5!=[]: fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives), axis=1) atr_slice =", "array de pixel como atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n t0 =", "atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 =", "atr3 = np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4", "eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos", "= np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 =", "eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes", "if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice", "= np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives), axis=1) atr_slice = np.vstack([atr_slice,atr5]) return", "axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1)", "= np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if", "mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = 
np.concatenate((eval2,twos), axis=1) atr_slice =", "np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T", "utf-8 -*- def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy", "np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]:", "t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones = np.ones((t1,1)) eval1", "pixel como atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2", "= np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if", "img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n", "= np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 =", "mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as np #Função de leitura da imagem", "de leitura da imagem e máscaras, e retorna array de pixel como atributo", "eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives", "img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2", "= np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5 =", "eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours", "img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T", 
"= np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes),", "atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 =", "extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as np #Função", "img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]:", "= img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if", "leitura da imagem e máscaras, e retorna array de pixel como atributo n", "= np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours),", "np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T", "def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as np", "np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]:", "= img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5 =", "retorna array de pixel como atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n t0", "atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5", "mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as np #Função de leitura", "mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice =", "= img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones 
= np.ones((t1,1)) eval1 =", "e máscaras, e retorna array de pixel como atributo n = img.shape[dim_prof] t1", "np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T", "t5 = img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1)", "atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0", "img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T", "dim_prof=0): import numpy as np #Função de leitura da imagem e máscaras, e", "ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros", "= img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos =", "atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 =", "numpy as np #Função de leitura da imagem e máscaras, e retorna array", "atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n", "img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n", "= img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours =", "= img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 =", "= np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives),", "np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), 
axis=1)", "t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice", "mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as np #Função de leitura da", "máscaras, e retorna array de pixel como atributo n = img.shape[dim_prof] t1 =", "np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives), axis=1) atr_slice = np.vstack([atr_slice,atr5]) return atr_slice", "mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import numpy as np #Função de", "mask_5=[], dim_prof=0): import numpy as np #Função de leitura da imagem e máscaras,", "= img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives =", "axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5", "np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T", "np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]:", "np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros),", "= img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes =", "img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3", "np #Função de leitura da imagem e máscaras, e retorna array de pixel", "= np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 =", "encoding: utf-8 -*- def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], 
mask_3=[], mask_4=[], mask_5=[], dim_prof=0): import", "fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4])", "np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1))", "img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n", "= np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if", "atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 =", "fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives), axis=1) atr_slice = np.vstack([atr_slice,atr5])", "np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives), axis=1)", "n = img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3", "twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2])", "#Função de leitura da imagem e máscaras, e retorna array de pixel como", "= img[mask_4].size/n t5 = img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice =", "img[mask_4].size/n t5 = img[mask_5].size/n ones = np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones),", "como atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2 =", "mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice =", "eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros = np.zeros((t0,1)) eval0", "= 
np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 =", "axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4", "np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1)", "t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones", "if mask_0!=[]: zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice", "atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2", "zeros = np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice])", "mask_4=[], mask_5=[], dim_prof=0): import numpy as np #Função de leitura da imagem e", "t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5", "mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice =", "= img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones =", "axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2", "t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3 = img[mask_3].size/n t4", "= np.ones((t1,1)) eval1 = img[mask_1].reshape(n,-1).T atr_slice = np.concatenate((eval1,ones), axis=1) if mask_0!=[]: zeros =", "de pixel como atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n", "da imagem e máscaras, e retorna array de pixel como atributo n =", "threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), 
axis=1) atr_slice = np.vstack([atr_slice,atr3])", "atr2 = np.concatenate((eval2,twos), axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3", "np.zeros((t0,1)) eval0 = img[mask_0].reshape(n,-1).T atr0 = np.concatenate((eval0,zeros), axis=1) atr_slice = np.vstack([atr0,atr_slice]) if mask_2!=[]:", "img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice = np.vstack([atr_slice,atr3]) if mask_4!=[]: fours = np.ones((t4,1))*4", "if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1) atr_slice", "= np.ones((t4,1))*4 eval4 = img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if", "as np #Função de leitura da imagem e máscaras, e retorna array de", "-*- encoding: utf-8 -*- def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[], mask_4=[], mask_5=[], dim_prof=0):", "if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos), axis=1) atr_slice", "= img.shape[dim_prof] t1 = img[mask_1].size/n t0 = img[mask_0].size/n t2 = img[mask_2].size/n t3 =", "e retorna array de pixel como atributo n = img.shape[dim_prof] t1 = img[mask_1].size/n", "= np.vstack([atr0,atr_slice]) if mask_2!=[]: twos = np.ones((t2,1))*2 eval2 = img[mask_2].reshape(n,-1).T atr2 = np.concatenate((eval2,twos),", "axis=1) atr_slice = np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3", "img[mask_4].reshape(n,-1).T atr4 = np.concatenate((eval4,fours), axis=1) atr_slice = np.vstack([atr_slice,atr4]) if mask_5!=[]: fives = np.ones((t5,1))*5", "np.vstack([atr_slice,atr2]) if mask_3!=[]: threes = np.ones((t3,1))*3 eval3 = img[mask_3].reshape(n,-1).T atr3 = np.concatenate((eval3,threes), axis=1)", "import numpy as np #Função de leitura da imagem e máscaras, e retorna", "if mask_5!=[]: fives = 
np.ones((t5,1))*5 eval5 = img[mask_5].reshape(n,-1).T atr5 = np.concatenate((eval5,fives), axis=1) atr_slice", "img[mask_2].size/n t3 = img[mask_3].size/n t4 = img[mask_4].size/n t5 = img[mask_5].size/n ones = np.ones((t1,1))" ]
[ "include, path, re_path from . import views as school_views from django.views.generic import TemplateView", "re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts,", "palakkad...) # path('', school_views.states, name='states'), # if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'),", "name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur, palakkad...) of a state path('<slug:state>/',", "if districts show districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'), # if", "import views as school_views from django.views.generic import TemplateView app_name = 'schools' urlpatterns =", "views as school_views from django.views.generic import TemplateView app_name = 'schools' urlpatterns = [", "[ # if blank show districts(Thissur, palakkad...) # path('', school_views.states, name='states'), # if", "name='states'), # if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if", "if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show", "from django.urls import include, path, re_path from . 
import views as school_views from", "view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur, palakkad...)", "app_name = 'schools' urlpatterns = [ # if blank show districts(Thissur, palakkad...) #", "school_views.districts, name='districts'), # if character, show sub districts path('<slug:state>/<district>/',school_views.sub_districts, name='sub_districts'), path('<slug:state>/<district>/<sub_district>/',school_views.schools, name='schools'), ]", "django.views.generic import TemplateView app_name = 'schools' urlpatterns = [ # if blank show", "TemplateView app_name = 'schools' urlpatterns = [ # if blank show districts(Thissur, palakkad...)", "name='school_view'), # if districts show districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'),", "show districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'), # if character, show", "palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'), # if character, show sub districts", "districts(Thissur, palakkad...) # path('', school_views.states, name='states'), # if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala,", "path('<slug:state>/', school_views.districts, name='districts'), # if character, show sub districts path('<slug:state>/<district>/',school_views.sub_districts, name='sub_districts'), path('<slug:state>/<district>/<sub_district>/',school_views.schools, name='schools'),", "of a state path('<slug:state>/', school_views.districts, name='districts'), # if character, show sub districts path('<slug:state>/<district>/',school_views.sub_districts,", "# if blank show districts(Thissur, palakkad...) 
# path('', school_views.states, name='states'), # if digit,", "districts show districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'), # if character,", "information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur, palakkad...) of a", "a state path('<slug:state>/', school_views.districts, name='districts'), # if character, show sub districts path('<slug:state>/<district>/',school_views.sub_districts, name='sub_districts'),", "state path('<slug:state>/', school_views.districts, name='districts'), # if character, show sub districts path('<slug:state>/<district>/',school_views.sub_districts, name='sub_districts'), path('<slug:state>/<district>/<sub_district>/',school_views.schools,", "re_path from . import views as school_views from django.views.generic import TemplateView app_name =", "import TemplateView app_name = 'schools' urlpatterns = [ # if blank show districts(Thissur,", "# if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts", "show districts(Thissur, palakkad...) # path('', school_views.states, name='states'), # if digit, view school information", "re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur, palakkad...) of a state", ". import views as school_views from django.views.generic import TemplateView app_name = 'schools' urlpatterns", "django.urls import include, path, re_path from . import views as school_views from django.views.generic", "= 'schools' urlpatterns = [ # if blank show districts(Thissur, palakkad...) 
# path('',", "# path('', school_views.states, name='states'), # if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view,", "import include, path, re_path from . import views as school_views from django.views.generic import", "if blank show districts(Thissur, palakkad...) # path('', school_views.states, name='states'), # if digit, view", "digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur,", "= [ # if blank show districts(Thissur, palakkad...) # path('', school_views.states, name='states'), #", "districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'), # if character, show sub", "# if districts show districts(Thissur, palakkad...) of a state path('<slug:state>/', school_views.districts, name='districts'), #", "'schools' urlpatterns = [ # if blank show districts(Thissur, palakkad...) # path('', school_views.states,", "urlpatterns = [ # if blank show districts(Thissur, palakkad...) # path('', school_views.states, name='states'),", "path, re_path from . import views as school_views from django.views.generic import TemplateView app_name", "as school_views from django.views.generic import TemplateView app_name = 'schools' urlpatterns = [ #", "from django.views.generic import TemplateView app_name = 'schools' urlpatterns = [ # if blank", "school_views.states, name='states'), # if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), #", "blank show districts(Thissur, palakkad...) 
# path('', school_views.states, name='states'), # if digit, view school", "path('', school_views.states, name='states'), # if digit, view school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'),", "from . import views as school_views from django.views.generic import TemplateView app_name = 'schools'", "school information re_path(r'^(?P<code>\\d{5})/',school_views.school_view_kerala, name='school_view_kerala'), re_path(r'^(?P<code>\\d{4}\\w{7})/',school_views.school_view, name='school_view'), # if districts show districts(Thissur, palakkad...) of", "school_views from django.views.generic import TemplateView app_name = 'schools' urlpatterns = [ # if" ]
[ "import _install_application def install_tool(options): version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\",", "def install_tool(options): version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name,", "= options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, tool_install_dir=install_dir) configure_actions", "install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, tool_install_dir=install_dir) configure_actions = { \"install_galaxy_tool\": install_tool, }", "options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, tool_install_dir=install_dir) configure_actions =", "name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, tool_install_dir=install_dir) configure_actions = {", "options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, tool_install_dir=install_dir) configure_actions = { \"install_galaxy_tool\": install_tool,", "cloudbio.galaxy.tools import _install_application def install_tool(options): version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir =", "= options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, tool_install_dir=install_dir) configure_actions = { \"install_galaxy_tool\":", "version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version, 
tool_install_dir=install_dir)", "from cloudbio.galaxy.tools import _install_application def install_tool(options): version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir", "_install_application def install_tool(options): version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None)", "install_tool(options): version = options.get(\"galaxy_tool_version\") name = options.get(\"galaxy_tool_name\") install_dir = options.get(\"galaxy_tool_dir\", None) _install_application(name, version," ]
[ "getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name):", "return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if", "\".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path): return", "return os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\",", "getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return", "def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def", "getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\",", "getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\",", "*path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name)", "def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name):", "getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return 
getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"):", "getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path):", "\"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\",", "os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path)", "getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path): return os.path.abspath(path) else: return", "return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return", "def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path): return os.path.abspath(path) else:", "import os def getRootPath(): return os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path)", "def getRootPath(): return os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path):", "\"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def", "return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path): return os.path.abspath(path) else: return getCurrentAbsPath(os.path.join(os.getcwd(),", "os def getRootPath(): return 
os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def", "def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return", "getRootPath(): return os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return", "os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path)", "getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\",", "\"..\")) def getProjectAbsPath(*path): return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path):", "*path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def", "def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def", "getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path): return os.path.abspath(path) else: return getCurrentAbsPath(os.path.join(os.getcwd(), path))", "return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return getProjectAbsPath(\"cli\", \"templates\", *path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\",", "return os.path.join(getRootPath(), *path) def getCachePath(*path): return getProjectAbsPath(\".cache\", *path) def getTemplatePath(*path): return 
getProjectAbsPath(\"cli\", \"templates\",", "name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name) def getCurrentAbsPath(path=\".\"): if os.path.isabs(path): return os.path.abspath(path)", "*path) def getNodeBinPath(name): return getProjectAbsPath(\"node_modules\", \".bin\", name) def getPipEnvBinPath(name): return getProjectAbsPath(\"env\", \"bin\", name)" ]
[ "Project imports from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply", "to use new functionality. Order of patching *matters* due to namespace reload. \"\"\"", "Force reload so that new query types are imported into namespace reload(import_module('django.db.models.sql')) p.mod('query').auto()", "*matters* due to namespace reload. \"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto()", "p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that new query types are imported into", "import_module # Project imports from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte():", "from importlib import reload, import_module # Project imports from patchy import patchy default_app_config", "with new CTE implementations, but this is only necessary to use new functionality.", "patching *matters* due to namespace reload. \"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto()", "these patches must be updated manually to conform with new CTE implementations, but", "must be updated manually to conform with new CTE implementations, but this is", "conform with new CTE implementations, but this is only necessary to use new", "CTE implementations, but this is only necessary to use new functionality. Order of", "but this is only necessary to use new functionality. Order of patching *matters*", "import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey patches to", "of patching *matters* due to namespace reload. \"\"\" with patchy('django.db.models', 'django_cte') as p:", "this is only necessary to use new functionality. Order of patching *matters* due", "from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey", "patches to Django. 
At present these patches must be updated manually to conform", "patch_cte(): \"\"\" Apply CTE monkey patches to Django. At present these patches must", "new CTE implementations, but this is only necessary to use new functionality. Order", "namespace reload. \"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force", "importlib import reload, import_module # Project imports from patchy import patchy default_app_config =", "import reload, import_module # Project imports from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig'", "def patch_cte(): \"\"\" Apply CTE monkey patches to Django. At present these patches", "due to namespace reload. \"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto()", "patches must be updated manually to conform with new CTE implementations, but this", "At present these patches must be updated manually to conform with new CTE", "new functionality. Order of patching *matters* due to namespace reload. \"\"\" with patchy('django.db.models',", "Apply CTE monkey patches to Django. At present these patches must be updated", "# Project imports from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\"", "p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that new query types are", "'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that new query", "to Django. 
At present these patches must be updated manually to conform with", "with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that", "patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that new", "functionality. Order of patching *matters* due to namespace reload. \"\"\" with patchy('django.db.models', 'django_cte')", "reload. \"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload", "= 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey patches to Django. At present", "to conform with new CTE implementations, but this is only necessary to use", "CTE monkey patches to Django. At present these patches must be updated manually", "patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey patches", "Django. At present these patches must be updated manually to conform with new", "be updated manually to conform with new CTE implementations, but this is only", "reload, import_module # Project imports from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def", "p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that new query types are imported", "\"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so", "is only necessary to use new functionality. Order of patching *matters* due to", "manually to conform with new CTE implementations, but this is only necessary to", "only necessary to use new functionality. Order of patching *matters* due to namespace", "\"\"\" Apply CTE monkey patches to Django. 
At present these patches must be", "monkey patches to Django. At present these patches must be updated manually to", "patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey patches to Django.", "to namespace reload. \"\"\" with patchy('django.db.models', 'django_cte') as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() #", "default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey patches to Django. At", "use new functionality. Order of patching *matters* due to namespace reload. \"\"\" with", "implementations, but this is only necessary to use new functionality. Order of patching", "p.mod('sql.subqueries').auto() # Force reload so that new query types are imported into namespace", "imports from patchy import patchy default_app_config = 'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE", "necessary to use new functionality. Order of patching *matters* due to namespace reload.", "# Force reload so that new query types are imported into namespace reload(import_module('django.db.models.sql'))", "reload so that new query types are imported into namespace reload(import_module('django.db.models.sql')) p.mod('query').auto() p.cls('manager.BaseManager').auto()", "present these patches must be updated manually to conform with new CTE implementations,", "so that new query types are imported into namespace reload(import_module('django.db.models.sql')) p.mod('query').auto() p.cls('manager.BaseManager').auto() p.cls('base.Model').auto()", "'django_cte.apps.DjangoCTEConfig' def patch_cte(): \"\"\" Apply CTE monkey patches to Django. At present these", "updated manually to conform with new CTE implementations, but this is only necessary", "Order of patching *matters* due to namespace reload. 
\"\"\" with patchy('django.db.models', 'django_cte') as", "as p: p.mod('expressions').auto() p.mod('sql.compiler').auto() p.mod('sql.subqueries').auto() # Force reload so that new query types" ]
[ "to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \"", "return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name def get_func_name(self): return self.__func_name class", "def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name = \"\" @abstractmethod def get_type_category(self):", "to_sdl(self, sdlconsole): pass def set_data_name(self, name): self.__data_name = name def update_clause(self, clause): clause_type_and_name", "sdlconsole): pass def set_data_name(self, name): self.__data_name = name def update_clause(self, clause): clause_type_and_name =", "clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name def", "\") \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self,", "def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs = [\"->", "self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name =", "import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name =", "get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \")", "\") \", self._get_formated_data_name(), \" \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return", "FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() 
self.__func_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod", "\"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name, clause in", "\"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs", "pass def set_data_name(self, name): self.__data_name = name def update_clause(self, clause): clause_type_and_name = clause.get_type_name()", "return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self,", "get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name", "abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name = \"\"", "= \"\" def append_string(self, string): self.__command_string += string def to_sdl(self, sdlconsole): return self.__command_string", "pass @abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass def set_data_name(self, name):", "return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod def", "in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name def", "for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name", "class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name = \"\" @abstractmethod", "name): 
self.__data_name = name def update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name]", "get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def", "def update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self):", "pass def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \",", "\"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod def get_type_category(self):", "WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name = \"\" @abstractmethod def", "pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(),", "cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for", "super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self, string): self.__command_string += string def to_sdl(self,", "name): self.__func_name = name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand,", "def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def", "self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self):", "self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name, clause in 
self.get_clauses().items():", "return \"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod", "self._get_formated_data_name(), \" \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class", "\") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment())", "cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\"", "self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return", "= \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole):", "cmd_base from abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses =", "@abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass", "= [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"]", "pass @abstractmethod def to_sdl(self, sdlconsole): pass def set_data_name(self, name): self.__data_name = name def", "\"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def", "self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs", 
"@abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs =", "append_string(self, string): self.__command_string += string def to_sdl(self, sdlconsole): return self.__command_string # TODO: core", "@abstractmethod def to_sdl(self, sdlconsole): pass def set_data_name(self, name): self.__data_name = name def update_clause(self,", "def to_sdl(self, sdlconsole): pass def set_data_name(self, name): self.__data_name = name def update_clause(self, clause):", "def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\" class", "+ self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self):", "= clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name def get_clauses(self):", "self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\"", "{} self.__data_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod", "def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(),", "cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod", "\" \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand):", "class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod def get_type_category(self): pass", "= 
\"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self,", "self).__init__() self.__command_string = \"\" def append_string(self, string): self.__command_string += string def to_sdl(self, sdlconsole):", "return self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name +", "get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs = [\"-> \",", "RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self, string): self.__command_string +=", "def append_string(self, string): self.__command_string += string def to_sdl(self, sdlconsole): return self.__command_string # TODO:", "super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass", "__init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name = \"\" @abstractmethod def get_type_category(self): pass", "self.__func_name = name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__()", "name def update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def", "self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass", "@abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\",", "update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return", 
"clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name def get_clauses(self): return", "clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses def", "[\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for", "clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name =", "super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self, sdlconsole):", "def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(),", "_get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__()", "abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name", "from abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses = {}", "<reponame>jasonoscar88/Photon-v2 from . 
import cmd_base from abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self):", "\"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole):", "def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self, string): self.__command_string += string", "self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\")", "for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self):", "from . import cmd_base from abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand,", "self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name, clause", "def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\"", "self).__init__() self.__func_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def", "\"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def", "self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name def get_func_name(self):", "+ clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses", 
"self.__clauses = {} self.__data_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self):", "def get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" +", "get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass def set_data_name(self, name): self.__data_name = name", "self.__command_string = \"\" def append_string(self, string): self.__command_string += string def to_sdl(self, sdlconsole): return", "in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__() self.__func_name", "\"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment())", "clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand,", "get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand):", "def __init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def", "name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string =", "self.__data_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod def", "cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name def 
get_func_name(self): return", "def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand,", "set_func_name(self, name): self.__func_name = name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self):", "self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self, string):", "import cmd_base from abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__() self.__clauses", "self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for clause_name, clause in self.get_clauses().items():", "clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) class FunctionCommand(WorldCommand): def __init__(self): super(FunctionCommand, self).__init__()", "CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass", "clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name", "self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self):", "= name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string", "\"\" def append_string(self, string): self.__command_string += string def to_sdl(self, sdlconsole): return self.__command_string #", "\"(\", self._get_formated_data_name(), \") \"] 
for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs)", "class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self):", "self._get_formated_data_name(), \") \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def", "\"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand):", "__init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self,", "clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause def get_data_name(self): return self.__data_name", "+ \"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod", "def to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(),", ". 
import cmd_base from abc import abstractmethod class WorldCommand(cmd_base.SdlCommand): def __init__(self): super(WorldCommand, self).__init__()", "super(WorldCommand, self).__init__() self.__clauses = {} self.__data_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod", "to_sdl(self, sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\",", "cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \")", "sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(),", "__init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self, string): self.__command_string += string def", "class RawCommand(cmd_base.SdlCommand): def __init__(self): super(RawCommand, self).__init__() self.__command_string = \"\" def append_string(self, string): self.__command_string", "@abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass def set_data_name(self, name): self.__data_name", "string): self.__command_string += string def to_sdl(self, sdlconsole): return self.__command_string # TODO: core commands", "[\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for clause_name, clause", "def get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass def", "\", self._get_formated_data_name(), \" \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs)", "self.__data_name = name def update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] =", "clause 
def get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return \"\\\"@\"", "\"\\\"\" class CreationCommand(WorldCommand): def __init__(self): super(CreationCommand, self).__init__() @abstractmethod def get_type_category(self): pass @abstractmethod def", "get_type_category(self): pass @abstractmethod def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass def set_data_name(self,", "\", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for clause_name, clause in", "cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name): self.__func_name = name def get_func_name(self): return self.__func_name", "self).__init__() self.__clauses = {} self.__data_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def", "set_data_name(self, name): self.__data_name = name def update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name()", "\", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\")", "\"] for clause_name, clause in self.get_clauses().items(): cmdstrs.append(clause.to_sdl_fragment()) cmdstrs.append(\"\\n\") return \"\".join(cmdstrs) def set_func_name(self, name):", "def set_func_name(self, name): self.__func_name = name def get_func_name(self): return self.__func_name class RawCommand(cmd_base.SdlCommand): def", "= clause def get_data_name(self): return self.__data_name def get_clauses(self): return self.__clauses def _get_formated_data_name(self): return", "\", self.get_type_category(), \"(\", self.get_type_name(), \") \", self.get_func_name(), \"(\", self._get_formated_data_name(), \") \"] for clause_name,", "def get_type_name(self): pass @abstractmethod def to_sdl(self, sdlconsole): pass def 
set_data_name(self, name): self.__data_name =", "return self.__clauses def _get_formated_data_name(self): return \"\\\"@\" + self.__data_name + \"\\\"\" class CreationCommand(WorldCommand): def", "sdlconsole): cmdstrs = [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"]", "= [\"-> \", self.get_type_category(), \"(\", self.get_type_name(), \") \", self._get_formated_data_name(), \" \"] for clause_name,", "__init__(self): super(FunctionCommand, self).__init__() self.__func_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self):", "def set_data_name(self, name): self.__data_name = name def update_clause(self, clause): clause_type_and_name = clause.get_type_name() +", "= name def update_clause(self, clause): clause_type_and_name = clause.get_type_name() + clause.get_name() self.__clauses[clause_type_and_name] = clause", "self.__func_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass def to_sdl(self,", "= {} self.__data_name = \"\" @abstractmethod def get_type_category(self): pass @abstractmethod def get_type_name(self): pass" ]
[ "42) #Applying Standard scaling to get optimized result sc = StandardScaler() X_train =", "from sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import", "pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare the results #model = pickle.load(open('model.pkl','rb')) #print(model.predict([[2,", "SGDClassifier from sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection", "= ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins, labels = group_names) label_quality", "= sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model", "import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names =", "splitting of data X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2,", "'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins, labels = group_names) label_quality = LabelEncoder()", "scaling to get optimized result sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test =", "['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins, labels = group_names) label_quality =", "get optimized result sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc", "= 1) y = wine['quality'] #Train and Test splitting of data X_train, X_test,", "#Bad becomes 0 and good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality',", "= RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading", "X_test = 
sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to disk", "from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8)", "y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 42) #Applying Standard", "wine['quality'] #Train and Test splitting of data X_train, X_test, y_train, y_test = train_test_split(X,", "y, test_size = 0.2, random_state = 42) #Applying Standard scaling to get optimized", "wine.drop('quality', axis = 1) y = wine['quality'] #Train and Test splitting of data", "wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names = ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'],", "X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving", "# Loading model to compare the results #model = pickle.load(open('model.pkl','rb')) #print(model.predict([[2, 9, 6]]))", "= label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1) y = wine['quality'] #Train and", "StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2,", "= group_names) label_quality = LabelEncoder() #Bad becomes 0 and good becomes 1 wine['quality']", "Standard scaling to get optimized result sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test", "to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare the results #model =", "group_names = ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins, labels = group_names)", "import RandomForestClassifier from sklearn.linear_model import SGDClassifier from 
sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing", "# Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare the", "StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) #", "sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train,", "rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) #", "sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier from sklearn.metrics import confusion_matrix, classification_report from", "location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names = ['bad', 'good'] wine['quality'] =", "= wine.drop('quality', axis = 1) y = wine['quality'] #Train and Test splitting of", "Test splitting of data X_train, X_test, y_train, y_test = train_test_split(X, y, test_size =", "pandas as pd import pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier", "= pd.cut(wine['quality'], bins = bins, labels = group_names) label_quality = LabelEncoder() #Bad becomes", "bins = (2, 6.5, 8) group_names = ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins", "train_test_split(X, y, test_size = 0.2, random_state = 42) #Applying Standard scaling to get", "X = wine.drop('quality', axis = 1) y = wine['quality'] #Train and Test splitting", "result sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200)", "= bins, labels = group_names) label_quality = LabelEncoder() #Bad becomes 0 and good", "= train_test_split(X, y, test_size = 0.2, 
random_state = 42) #Applying Standard scaling to", "1 wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1) y = wine['quality']", "RandomForestClassifier from sklearn.linear_model import SGDClassifier from sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import", "<reponame>jirramounikapriyanka/sample-android-python-ml-app<gh_stars>0 import pandas as pd import pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model", "test_size = 0.2, random_state = 42) #Applying Standard scaling to get optimized result", "pd import pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier from sklearn.metrics", "= wine['quality'] #Train and Test splitting of data X_train, X_test, y_train, y_test =", "to get optimized result sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test)", "sklearn.linear_model import SGDClassifier from sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder", "import pandas as pd import pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import", "y_test = train_test_split(X, y, test_size = 0.2, random_state = 42) #Applying Standard scaling", "Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare the results", "from sklearn.linear_model import SGDClassifier from sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler,", "import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV,", "cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names = 
['bad', 'good'] wine['quality']", "= 0.2, random_state = 42) #Applying Standard scaling to get optimized result sc", "open('model.pkl','wb')) # Loading model to compare the results #model = pickle.load(open('model.pkl','rb')) #print(model.predict([[2, 9,", "labels = group_names) label_quality = LabelEncoder() #Bad becomes 0 and good becomes 1", "wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1) y = wine['quality'] #Train", "= LabelEncoder() #Bad becomes 0 and good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X", "bins = bins, labels = group_names) label_quality = LabelEncoder() #Bad becomes 0 and", "sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to disk pickle.dump(rfc, open('model.pkl','wb'))", "6.5, 8) group_names = ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins, labels", "train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names = ['bad',", "from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier from sklearn.metrics import confusion_matrix, classification_report", "from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location)", "0.2, random_state = 42) #Applying Standard scaling to get optimized result sc =", "optimized result sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc =", "pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier from sklearn.metrics import confusion_matrix,", "(2, 6.5, 8) group_names = ['bad', 
'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins,", "disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare the results #model = pickle.load(open('model.pkl','rb'))", "y = wine['quality'] #Train and Test splitting of data X_train, X_test, y_train, y_test", "bins, labels = group_names) label_quality = LabelEncoder() #Bad becomes 0 and good becomes", "LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5,", "sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to", "good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1) y", "= sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to disk pickle.dump(rfc,", "RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train) # Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model", "#Applying Standard scaling to get optimized result sc = StandardScaler() X_train = sc.fit_transform(X_train)", "confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score", "pd.cut(wine['quality'], bins = bins, labels = group_names) label_quality = LabelEncoder() #Bad becomes 0", "model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare the results #model", "X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 42) #Applying", "LabelEncoder() #Bad becomes 0 and good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X =", "#Train and Test splitting of data X_train, X_test, y_train, y_test = 
train_test_split(X, y,", "sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names", "as pd import pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier from", "rfc.fit(X_train, y_train) # Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to", "sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split,", "GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins = (2, 6.5, 8) group_names = ['bad', 'good']", "and Test splitting of data X_train, X_test, y_train, y_test = train_test_split(X, y, test_size", "= StandardScaler() X_train = sc.fit_transform(X_train) X_test = sc.fit_transform(X_test) rfc = RandomForestClassifier(n_estimators=200) rfc.fit(X_train, y_train)", "of data X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state", "= (2, 6.5, 8) group_names = ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins =", "= 42) #Applying Standard scaling to get optimized result sc = StandardScaler() X_train", "label_quality = LabelEncoder() #Bad becomes 0 and good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality'])", "axis = 1) y = wine['quality'] #Train and Test splitting of data X_train,", "and good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1)", "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 42)", "becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1) y =", "0 and good becomes 1 
wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis =", "wine['quality'] = pd.cut(wine['quality'], bins = bins, labels = group_names) label_quality = LabelEncoder() #Bad", "import pickle from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import SGDClassifier from sklearn.metrics import", "classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\"", "y_train) # Saving model to disk pickle.dump(rfc, open('model.pkl','wb')) # Loading model to compare", "label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis = 1) y = wine['quality'] #Train and Test", "1) y = wine['quality'] #Train and Test splitting of data X_train, X_test, y_train,", "8) group_names = ['bad', 'good'] wine['quality'] = pd.cut(wine['quality'], bins = bins, labels =", "data X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state =", "import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins =", "group_names) label_quality = LabelEncoder() #Bad becomes 0 and good becomes 1 wine['quality'] =", "sklearn.preprocessing import StandardScaler, LabelEncoder from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score location=\"C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv\" wine=pd.read_csv(location) bins", "random_state = 42) #Applying Standard scaling to get optimized result sc = StandardScaler()", "import SGDClassifier from sklearn.metrics import confusion_matrix, classification_report from sklearn.preprocessing import StandardScaler, LabelEncoder 
from", "becomes 0 and good becomes 1 wine['quality'] = label_quality.fit_transform(wine['quality']) X = wine.drop('quality', axis" ]
[ "image with other image build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ...", "build_opts\") # When building the BA itself, we need this constant to avoid", "forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\",", "the root directory of this source tree. # This combines configurable build-time constants", "name, build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot", "= _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL.", "that is None. def _get_str_cfg(name, default = None, allow_none = False): ret =", "config must set key {}\".format(name)) return ret # Defaults to the empty list", "= build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format(", "Allow `buck -c` overrides from the command-line val = native.read_config(CONFIG_KEY, name) if val", "@mode/dev. # # This is turned into json and loaded by the python", "default = default) if not allow_none and ret == None: fail(\"Repo config must", "for this struct / non-struct split, so we # could easily move everything", "key {}\".format(name)) return ret # Defaults to the empty list if the config", "\"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default =", "rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, )", "sub system. In the future this would be # implemented via a `Shape`", "version set that the # including `image.layer` will use. 
This would be fixable", "set a default package manager -- in non-default settings, this has to be", "stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY", "BA can support multiple package managers. In the future, if specifying a non-default", "a circular # dependency. # # This feature is exposed a non-`None` magic", "a terrible idea for almost # every application. To create an easy-to-review code", "maintained across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors", "bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT use this outside of Antlir internals.", "`/__antlir__` paths, see `snapshot_install_dir` doc. `None` uses the default determined by looking up", "and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") # When building the", "not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2", "Antlir's but # similar in spirit. It uses # as the delimiter for", "# This feature is exposed a non-`None` magic constant so that callers #", "if the config is not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path =", "methods to provide the precedence order # because the way this is determined", "`yum`, `tar`, `ln`, ... 
- `rpm_installer`: The build appliance currently does not set", "elements in this list, because we do not know the version set that", "the build mode provided # determines the value of the `.buckconfig` properties used.", "`.buckconfig` is supposed to support # for list configs (but does not, due", "a space-separated dict: k1 v1 k2 v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t,", "future, if specifying a non-default installer per image proves onerous when using non-default", "return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot", "# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code", "config is not set. # # We use space to separate plurals because", "), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(),", "( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides,", "\"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\")", "_get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is", "= False): \"\"\" Arguments - `name`: The name of the flavor - `build_appliance`:", "== \"true\", # This is a dictionary that allow for looking up configurable", "file in the root directory of this source tree. 
# This combines configurable", "separator = \" \", default = None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if", "BAs, we could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of", "usage of host mounts, # since they are a huge footgun, and are", "idea for almost # every application. To create an easy-to-review code bottleneck, any", "the precedence order # because the way this is determined is *always* based", "python side of the # `nspawn_in_subvol` sub system. In the future this would", "* len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2 v2\")", "many targets as there are # elements in this list, because we do", "load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\",", "= rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, )", "\"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This is", "-c` overrides. # # Buck has a notion of flavors that is separate", "`nspawn_in_subvol` sub system. 
In the future this would be # implemented via a", "\"Why are `feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed", "\"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\",", "proves onerous when using non-default BAs, we could support a `default` symlink under", "Antlir internals. See \"Why are `feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for", "overrides. # # Buck has a notion of flavors that is separate from", "This is turned into json and loaded by the python side of the", "circular # dependency. # # This feature is exposed a non-`None` magic constant", "dictionary that allow for looking up configurable artifact # targets by a key.", "flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. # # For each `feature`,", "value of the `.buckconfig` properties used. There is no # way to override", "almost # every application. To create an easy-to-review code bottleneck, any # feature", "# could easily move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\",", "a non-`None` magic constant so that callers # cannot get confused whether `None`", "def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead. def", "loaded by the python side of the # `nspawn_in_subvol` sub system. In the", "package managers. In the future, if specifying a non-default installer per image proves", "emit as many targets as there are # elements in this list, because", "any # feature target using a host-mount must be listed in this config.", "# `nspawn_in_subvol` sub system. 
In the future this would be # implemented via", "and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This is a dictionary that allow", ") if rpm_installer != \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in", "directory of this source tree. # This combines configurable build-time constants (documented on", "`feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed explanation. PRIVATE_feature_suffix", "similar in spirit. It uses # as the delimiter for per-flavor # config", "\"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance =", "_get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) !=", "\"true\", # This is a dictionary that allow for looking up configurable artifact", "rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides", "= \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT use", "bottleneck, any # feature target using a host-mount must be listed in this", "v1 k2 v2\") # A layer can turn off version locking # via", "`None` refers to \"no BA\" or \"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance", "the # LICENSE file in the root directory of this source tree. #", "list, because we do not know the version set that the # including", "# hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\"", "with other image build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ... 
-", "\"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults to the empty list if the", "if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1", "for key, v in flavor_config.items(): val = native.read_config(config_key, key, None) if val !=", "flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t, # This one is", "easy-to-review code bottleneck, any # feature target using a host-mount must be listed", "that there's no deep reason for this struct / non-struct split, so we", ") def _get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config", "on REPO_CFG # below), and non-configurable constants that are currently not namespaced. #", "!= None: return val val = do_not_use_repo_cfg.get(name) if val != None: return val", "via a `Shape` so that the typing can be maintained across # bzl/python.", "+ \"#\" + flavor for key, v in flavor_config.items(): val = native.read_config(config_key, key,", "load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST =", "Path to a layer target of a build appliance, containing an installed `rpm_repo_snapshot()`,", "we need this constant to avoid a circular # dependency. 
# # This", "None): # Allow `buck -c` overrides from the command-line val = native.read_config(CONFIG_KEY, name)", "+ flavor for key, v in flavor_config.items(): val = native.read_config(config_key, key, None) if", "is turned into json and loaded by the python side of the #", "\"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config =", "can support multiple package managers. In the future, if specifying a non-default installer", "notion of flavors that is separate from Antlir's but # similar in spirit.", "build_appliance == None: fail( \"Must be a target path, or a value from", "to the empty list if the config is not set def _get_artifact_key_to_path(): lst", "target path, or a value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\"", "in flavor_config.items(): val = native.read_config(config_key, key, None) if val != None: flavor_config[key] =", "= shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT use this outside of", "the `nevra` defines its version. - `unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if build_appliance", "# similar in spirit. It uses # as the delimiter for per-flavor #", "= _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to execute FB binaries in @mode/dev.", "rpm_installer supplied in build_opts\") # When building the BA itself, we need this", "in the # LICENSE file in the root directory of this source tree.", "including `image.layer` will use. This would be fixable if Buck # supported providers", "is not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if", "in @mode/dev. 
# # This is turned into json and loaded by the", "= unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\",", "`_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default = None): # Allow `buck -c` overrides from", "we'll mutate a copy # Apply `buck -c` overrides. # # Buck has", "based on the build mode # provided, ie `@mode/opt` vs `@mode/dev`. And the", "(default or []) # Defaults to the empty list if the config is", "providers like Bazel does. antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none = True), antlir_cell_name = config.get_antlir_cell_name(),", "will generally loudly fail on a config value that is None. def _get_str_cfg(name,", "there are # elements in this list, because we do not know the", "can turn off version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING", "default = None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default or", "None, allow_none = False): ret = _do_not_use_directly_get_cfg(name, default = default) if not allow_none", "The build appliance currently does not set a default package manager -- in", "spirit. It uses # as the delimiter for per-flavor # config options, so", "version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`: The name of", "val = native.read_config(CONFIG_KEY, name) if val != None: return val val = do_not_use_repo_cfg.get(name)", "a space-separated dict: k1 v1 k2 v2\") # A layer can turn off", "affiliates. # # This source code is licensed under the MIT license found", "return val return default # We don't have \"globally required\" configs because code", "Platforms, Inc. and affiliates. 
# # This source code is licensed under the", "is a space-separated dict: k1 v1 k2 v2\") # A layer can turn", "= None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`: The", "`build_appliance`: Path to a layer target of a build appliance, containing an installed", "to support # for list configs (but does not, due to bugs). def", "None: return val val = do_not_use_repo_cfg.get(name) if val != None: return val return", "off version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\"", "and non-configurable constants that are currently not namespaced. # # Note that there's", "for per-flavor # config options, so we follow that pattern. config_key = CONFIG_KEY", "provided, ie `@mode/opt` vs `@mode/dev`. And the build mode provided # determines the", "len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1 k2 v2\") #", "of `nevra` objects (see antlir/bzl/constants.bzl for definition). If rpm with given name to", "`@mode/opt` vs `@mode/dev`. And the build mode provided # determines the value of", "flavor_config.items(): val = native.read_config(config_key, key, None) if val != None: flavor_config[key] = val", "building the BA itself, we need this constant to avoid a circular #", "this source tree. # This combines configurable build-time constants (documented on REPO_CFG #", "# as the delimiter for per-flavor # config options, so we follow that", "set that the # including `image.layer` will use. This would be fixable if", "# At FB, the Antlir team tightly controls the usage of host mounts,", "return default # We don't have \"globally required\" configs because code that requires", "key. artifact = _get_artifact_key_to_path(), # At FB, the Antlir team tightly controls the", "feature target using a host-mount must be listed in this config. 
host_mounts_allowed_in_targets =", "a huge footgun, and are a terrible idea for almost # every application.", "objects (see antlir/bzl/constants.bzl for definition). If rpm with given name to be installed,", "to the empty list if the config is not set. # # We", "space-separated dict: k1 v1 k2 v2\") # A layer can turn off version", "flavors that is separate from Antlir's but # similar in spirit. It uses", "name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check", "val != None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG =", "underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use", "DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name", "build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else", "# paths, and also because that's what `.buckconfig` is supposed to support #", "ret = _do_not_use_directly_get_cfg(name, default = default) if not allow_none and ret == None:", "the future, if specifying a non-default installer per image proves onerous when using", "if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name = name, build_appliance =", "contain underscores. 
hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) #", "None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default or []) #", "the python side of the # `nspawn_in_subvol` sub system. In the future this", "2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2", "easily move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\",", "\"Must be a target path, or a value from `constants.bzl`\", \"build_appliance\", ) if", "source code is licensed under the MIT license found in the # LICENSE", "lst[1::2])) if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1", "_get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) !=", "!= len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2 v2\") return key_to_path", "_get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to execute FB binaries in @mode/dev. #", "{} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) #", "flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. # #", "non-configurable constants that are currently not namespaced. # # Note that there's no", "BA\". 
if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance = normalize_target(build_appliance)", "if specifying a non-default installer per image proves onerous when using non-default BAs,", "{}\".format(name)) return ret # Defaults to the empty list if the config is", "host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default =", "mounts, # since they are a huge footgun, and are a terrible idea", "# forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed explanation. PRIVATE_feature_suffix =", "None. def _get_str_cfg(name, default = None, allow_none = False): ret = _do_not_use_directly_get_cfg(name, default", "key, v in flavor_config.items(): val = native.read_config(config_key, key, None) if val != None:", "- `rpm_repo_snapshot`: List of target or `/__antlir__` paths, see `snapshot_install_dir` doc. `None` uses", "\"\"\" if build_appliance == None: fail( \"Must be a target path, or a", "(but does not, due to bugs). def _get_str_list_cfg(name, separator = \" \", default", "# config options, so we follow that pattern. config_key = CONFIG_KEY + \"#\"", "if Buck # supported providers like Bazel does. antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none =", "# # We use space to separate plurals because spaces are not allowed", "is supposed to support # for list configs (but does not, due to", "if val != None: return val val = do_not_use_repo_cfg.get(name) if val != None:", "`tar`, `ln`, ... 
- `rpm_installer`: The build appliance currently does not set a", "= BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`: The name of the", "`constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer", "combines configurable build-time constants (documented on REPO_CFG # below), and non-configurable constants that", "the config is not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2],", "build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check =", "a layer target of a build appliance, containing an installed `rpm_repo_snapshot()`, plus an", "struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\",", "uses the default determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List", "default) if not allow_none and ret == None: fail(\"Repo config must set key", "`buck -c` overrides. # # Buck has a notion of flavors that is", "\"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new(", "!= \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") # When building the BA itself,", "targets as there are # elements in this list, because we do not", "of target or `/__antlir__` paths, see `snapshot_install_dir` doc. 
`None` uses the default determined", "an easy-to-review code bottleneck, any # feature target using a host-mount must be", "`image.layer` will use. This would be fixable if Buck # supported providers like", "the empty list if the config is not set def _get_version_set_to_path(): lst =", "`nevra` defines its version. - `unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if build_appliance ==", "in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl for definition). If", "a # config will generally loudly fail on a config value that is", "from `constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported", "normalize_target(build_appliance) return shape.new( flavor_config_t, name = name, build_appliance = build_appliance, rpm_installer = rpm_installer,", "THIS DICTIONARY SMALL. # # For each `feature`, we have to emit as", "(see antlir/bzl/constants.bzl for definition). If rpm with given name to be installed, the", "so that the typing can be maintained across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg(", "also because that's what `.buckconfig` is supposed to support # for list configs", "not know the version set that the # including `image.layer` will use. This", "they are a huge footgun, and are a terrible idea for almost #", "REPO_CFG # below), and non-configurable constants that are currently not namespaced. # #", "to be chosen per image, since a BA can support multiple package managers.", "k2 v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance,", "other image build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ... 
- `rpm_installer`:", "# A layer can turn off version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`.", "\"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") # When building the BA itself, we", "It uses # as the delimiter for per-flavor # config options, so we", "BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults to the empty", "tightly controls the usage of host mounts, # since they are a huge", "k2 v2\") # A layer can turn off version locking # via `version_set", "definition). If rpm with given name to be installed, the `nevra` defines its", "config_key = CONFIG_KEY + \"#\" + flavor for key, v in flavor_config.items(): val", "= None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments", "that allow for looking up configurable artifact # targets by a key. artifact", "as many targets as there are # elements in this list, because we", "\"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return", "This combines configurable build-time constants (documented on REPO_CFG # below), and non-configurable constants", "# detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain", "execute FB binaries in @mode/dev. # # This is turned into json and", "Apply `buck -c` overrides. 
# # Buck has a notion of flavors that", "load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\",", "copy # Apply `buck -c` overrides. # # Buck has a notion of", "NOT use this outside of Antlir internals. See \"Why are `feature`s # forbidden", "rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path,", "constants (documented on REPO_CFG # below), and non-configurable constants that are currently not", "must be listed in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts", "flavor for key, v in flavor_config.items(): val = native.read_config(config_key, key, None) if val", "supported providers like Bazel does. antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none = True), antlir_cell_name =", "native.read_config(config_key, key, None) if val != None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config)", "like Bazel does. 
antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none = True), antlir_cell_name = config.get_antlir_cell_name(), )", "# implemented via a `Shape` so that the typing can be maintained across", "_do_not_use_directly_get_cfg(name, default = None): # Allow `buck -c` overrides from the command-line val", "val = do_not_use_repo_cfg.get(name) if val != None: return val return default # We", "ret # Defaults to the empty list if the config is not set.", "because the way this is determined is *always* based on the build mode", "At FB, the Antlir team tightly controls the usage of host mounts, #", "repo_config_t, # This one is not using the access methods to provide the", "def _get_str_list_cfg(name, separator = \" \", default = None): s = _do_not_use_directly_get_cfg(name) return", "if build_appliance == None: fail( \"Must be a target path, or a value", "determines the value of the `.buckconfig` properties used. There is no # way", "config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to execute FB binaries", "currently not namespaced. # # Note that there's no deep reason for this", "vs `@mode/dev`. And the build mode provided # determines the value of the", "non-default BAs, we could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List", "val != None: return val val = do_not_use_repo_cfg.get(name) if val != None: return", "_get_str_cfg(name, default = None, allow_none = False): ret = _do_not_use_directly_get_cfg(name, default = default)", "installer per image proves onerous when using non-default BAs, we could support a", "`rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. 
- `rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl for definition).", "shape.new( flavor_config_t, name = name, build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot =", "To create an easy-to-review code bottleneck, any # feature target using a host-mount", "A layer can turn off version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions]", "name of the flavor - `build_appliance`: Path to a layer target of a", "new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t, # This one is not using", "def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path =", "when using non-default BAs, we could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. -", "build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name = name, build_appliance = build_appliance,", "= None if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name = name,", "appliance, containing an installed `rpm_repo_snapshot()`, plus an OS image with other image build", "or []) # Defaults to the empty list if the config is not", "that requires a # config will generally loudly fail on a config value", "have to emit as many targets as there are # elements in this", "# This combines configurable build-time constants (documented on REPO_CFG # below), and non-configurable", "List of `nevra` objects (see antlir/bzl/constants.bzl for definition). 
If rpm with given name", "else (default or []) # Defaults to the empty list if the config", "because spaces are not allowed in target # paths, and also because that's", "if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance = normalize_target(build_appliance) return", "PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba =", "and affiliates. # # This source code is licensed under the MIT license", "must set key {}\".format(name)) return ret # Defaults to the empty list if", "target of a build appliance, containing an installed `rpm_repo_snapshot()`, plus an OS image", "the # `nspawn_in_subvol` sub system. In the future this would be # implemented", "use this outside of Antlir internals. See \"Why are `feature`s # forbidden as", "that pattern. config_key = CONFIG_KEY + \"#\" + flavor for key, v in", "list if the config is not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path", "or `_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default = None): # Allow `buck -c` overrides", "_get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS", "`rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl for definition). If rpm with given", "every application. To create an easy-to-review code bottleneck, any # feature target using", "value that is None. def _get_str_cfg(name, default = None, allow_none = False): ret", "supposed to support # for list configs (but does not, due to bugs).", "_get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. 
# # For each", "shape.new( repo_config_t, # This one is not using the access methods to provide", "\"no BA\" or \"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if", "# because the way this is determined is *always* based on the build", "a `Shape` so that the typing can be maintained across # bzl/python. host_mounts_for_repo_artifacts", "This source code is licensed under the MIT license found in the #", "generally loudly fail on a config value that is None. def _get_str_cfg(name, default", "an installed `rpm_repo_snapshot()`, plus an OS image with other image build tools like", "vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a", "the version set that the # including `image.layer` will use. This would be", "no # way to override this value except to use a different build", "paths, and also because that's what `.buckconfig` is supposed to support # for", "we could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target", "def _get_str_cfg(name, default = None, allow_none = False): ret = _do_not_use_directly_get_cfg(name, default =", "host mounts, # since they are a huge footgun, and are a terrible", "constants that are currently not namespaced. # # Note that there's no deep", "that the # including `image.layer` will use. This would be fixable if Buck", "plus an OS image with other image build tools like `btrfs`, `dnf`, `yum`,", "# # For each `feature`, we have to emit as many targets as", "Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under", "allow_none = False): ret = _do_not_use_directly_get_cfg(name, default = default) if not allow_none and", "== DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t,", "s else (default or []) # Defaults to the empty list if the", "symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or `/__antlir__` paths, see `snapshot_install_dir`", "def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path)", "return vs_to_path # Defaults to the empty list if the config is not", "None) if val != None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config", "= _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. # # For each `feature`, we", "specifying a non-default installer per image proves onerous when using non-default BAs, we", "\"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t,", "len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2 v2\") return key_to_path def", "In the future, if specifying a non-default installer per image proves onerous when", "the Antlir team tightly controls the usage of host mounts, # since they", "\"build_appliance\", ) if rpm_installer != \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied", "unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config in", "on a config value that is None. def _get_str_cfg(name, default = None, allow_none", "a different build mode. 
artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\",", "dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated dict:", "== None: fail(\"Repo config must set key {}\".format(name)) return ret # Defaults to", "name = name, build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot)", "overrides from the command-line val = native.read_config(CONFIG_KEY, name) if val != None: return", "but # similar in spirit. It uses # as the delimiter for per-flavor", "set. # # We use space to separate plurals because spaces are not", "in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to execute", "a build appliance, containing an installed `rpm_repo_snapshot()`, plus an OS image with other", "if val != None: return val return default # We don't have \"globally", "layer_feature_suffix = \"__layer-feature\", # Do NOT use this outside of Antlir internals. See", "version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def", "= dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated", "per image, since a BA can support multiple package managers. In the future,", "code bottleneck, any # feature target using a host-mount must be listed in", "feature is exposed a non-`None` magic constant so that callers # cannot get", "in `bzl/image/feature/new.bzl` for a # detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\",", "`ln`, ... 
- `rpm_installer`: The build appliance currently does not set a default", "# # This feature is exposed a non-`None` magic constant so that callers", "!= \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") # When", "# We don't have \"globally required\" configs because code that requires a #", "_get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. #", "\"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY", ") and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This is a dictionary that", "follow that pattern. config_key = CONFIG_KEY + \"#\" + flavor for key, v", "a config value that is None. def _get_str_cfg(name, default = None, allow_none =", "`rpm_repo_snapshot()`, plus an OS image with other image build tools like `btrfs`, `dnf`,", "has to be chosen per image, since a BA can support multiple package", "Do NOT use this outside of Antlir internals. See \"Why are `feature`s #", "the flavor - `build_appliance`: Path to a layer target of a build appliance,", "native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This is a dictionary that allow for", "plurals because spaces are not allowed in target # paths, and also because", "* len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1 k2 v2\")", "this is determined is *always* based on the build mode # provided, ie", "(native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This", "... 
- `rpm_installer`: The build appliance currently does not set a default package", "are a terrible idea for almost # every application. To create an easy-to-review", "spaces are not allowed in target # paths, and also because that's what", "*always* based on the build mode # provided, ie `@mode/opt` vs `@mode/dev`. And", "# bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"),", "= native.read_config(config_key, key, None) if val != None: flavor_config[key] = val flavor_to_config[flavor] =", "host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to execute FB binaries in", "rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") # When building the BA", "installed, the `nevra` defines its version. - `unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if", "given name to be installed, the `nevra` defines its version. - `unsafe_bypass_flavor_check`: Do", "_get_artifact_key_to_path(), # At FB, the Antlir team tightly controls the usage of host", "be chosen per image, since a BA can support multiple package managers. In", "be # implemented via a `Shape` so that the typing can be maintained", "if the config is not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path =", "s.split(separator) if s else (default or []) # Defaults to the empty list", "is not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if", "shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT use this outside of Antlir", "= _do_not_use_directly_get_cfg(name, default = default) if not allow_none and ret == None: fail(\"Repo", "binaries in @mode/dev. 
# # This is turned into json and loaded by", "a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or `/__antlir__` paths,", "non-`None` magic constant so that callers # cannot get confused whether `None` refers", "will use. This would be fixable if Buck # supported providers like Bazel", "to separate plurals because spaces are not allowed in target # paths, and", "= None, allow_none = False): ret = _do_not_use_directly_get_cfg(name, default = default) if not", "application. To create an easy-to-review code bottleneck, any # feature target using a", "= native.read_config(CONFIG_KEY, name) if val != None: return val val = do_not_use_repo_cfg.get(name) if", "to the empty list if the config is not set def _get_version_set_to_path(): lst", "determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of `nevra` objects", "uses # as the delimiter for per-flavor # config options, so we follow", "can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target)", "# This is a dictionary that allow for looking up configurable artifact #", "to be installed, the `nevra` defines its version. - `unsafe_bypass_flavor_check`: Do NOT use.", "# Note that there's no deep reason for this struct / non-struct split,", "\"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE =", "its version. - `unsafe_bypass_flavor_check`: Do NOT use. 
\"\"\" if build_appliance == None: fail(", "build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new(", "could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or", "unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`: The name of the flavor -", "RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check,", "When building the BA itself, we need this constant to avoid a circular", "a key. artifact = _get_artifact_key_to_path(), # At FB, the Antlir team tightly controls", "host mounts required to execute FB binaries in @mode/dev. # # This is", "allow_none and ret == None: fail(\"Repo config must set key {}\".format(name)) return ret", "dependency. # # This feature is exposed a non-`None` magic constant so that", "for a # detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames", "space-separated dict: k1 v1 k2 v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs)", "the MIT license found in the # LICENSE file in the root directory", "and also because that's what `.buckconfig` is supposed to support # for list", "\"\"\" Arguments - `name`: The name of the flavor - `build_appliance`: Path to", "rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {}", "is separate from Antlir's but # similar in spirit. It uses # as", "provided # determines the value of the `.buckconfig` properties used. There is no", "to execute FB binaries in @mode/dev. 
# # This is turned into json", "like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ... - `rpm_installer`: The build appliance currently", "a target path, or a value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer !=", "looking up configurable artifact # targets by a key. artifact = _get_artifact_key_to_path(), #", "this outside of Antlir internals. See \"Why are `feature`s # forbidden as dependencies?\"", "# below), and non-configurable constants that are currently not namespaced. # # Note", "doc. `None` uses the default determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. -", "TROLLING\" return vs_to_path # Defaults to the empty list if the config is", "see `snapshot_install_dir` doc. `None` uses the default determined by looking up `rpm_installer` in", "if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path =", "(native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\",", "flavor_to_config = {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor}", "-c` overrides from the command-line val = native.read_config(CONFIG_KEY, name) if val != None:", "# config will generally loudly fail on a config value that is None.", "the config is not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2],", "_get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is", "using a host-mount must be listed in this config. 
host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), #", "or a value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\" and rpm_installer", "We don't have \"globally required\" configs because code that requires a # config", "if the config is not set. # # We use space to separate", "\"__layer-feature\", # Do NOT use this outside of Antlir internals. See \"Why are", "of a build appliance, containing an installed `rpm_repo_snapshot()`, plus an OS image with", "List of target or `/__antlir__` paths, see `snapshot_install_dir` doc. `None` uses the default", "default package manager -- in non-default settings, this has to be chosen per", "required\" configs because code that requires a # config will generally loudly fail", "\"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do", "do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy #", "Enumerates host mounts required to execute FB binaries in @mode/dev. # # This", "value except to use a different build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\")", "the delimiter for per-flavor # config options, so we follow that pattern. config_key", "BA\" or \"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance:", "\" \", default = None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s else", "# supported providers like Bazel does. antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none = True), antlir_cell_name", "SMALL. # # For each `feature`, we have to emit as many targets", "to \"no BA\" or \"default BA\". 
if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None", "does not set a default package manager -- in non-default settings, this has", "def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None,", "default = None): # Allow `buck -c` overrides from the command-line val =", "used. There is no # way to override this value except to use", "for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll", "load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\"", "terrible idea for almost # every application. To create an easy-to-review code bottleneck,", "# We use space to separate plurals because spaces are not allowed in", "# since they are a huge footgun, and are a terrible idea for", "to a layer target of a build appliance, containing an installed `rpm_repo_snapshot()`, plus", "rpm_installer != \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") #", "rpm with given name to be installed, the `nevra` defines its version. -", "\"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\"", "build mode # provided, ie `@mode/opt` vs `@mode/dev`. 
And the build mode provided", "because code that requires a # config will generally loudly fail on a", "set key {}\".format(name)) return ret # Defaults to the empty list if the", "None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`: The name", "a non-default installer per image proves onerous when using non-default BAs, we could", "v2\") # A layer can turn off version locking # via `version_set =", "key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot =", "flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t, #", "flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy # Apply `buck", "separate plurals because spaces are not allowed in target # paths, and also", "None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t,", "itself, we need this constant to avoid a circular # dependency. # #", "key, None) if val != None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return", "If rpm with given name to be installed, the `nevra` defines its version.", "hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg`", "Use `_get_str_cfg` or `_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default = None): # Allow `buck", "return s.split(separator) if s else (default or []) # Defaults to the empty", "<reponame>facebookincubator/fs_image # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source", "of Antlir internals. 
See \"Why are `feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl`", "the access methods to provide the precedence order # because the way this", "# This source code is licensed under the MIT license found in the", "# KEEP THIS DICTIONARY SMALL. # # For each `feature`, we have to", "name) if val != None: return val val = do_not_use_repo_cfg.get(name) if val !=", "exposed a non-`None` magic constant so that callers # cannot get confused whether", "\"#\" + flavor for key, v in flavor_config.items(): val = native.read_config(config_key, key, None)", "multiple package managers. In the future, if specifying a non-default installer per image", "= _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP", "since a BA can support multiple package managers. In the future, if specifying", "We use space to separate plurals because spaces are not allowed in target", "return val val = do_not_use_repo_cfg.get(name) if val != None: return val return default", "\"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\") # When building", "in build_opts\") # When building the BA itself, we need this constant to", "# dependency. # # This feature is exposed a non-`None` magic constant so", "v in flavor_config.items(): val = native.read_config(config_key, key, None) if val != None: flavor_config[key]", "struct / non-struct split, so we # could easily move everything into the", "the empty list if the config is not set. # # We use", "the command-line val = native.read_config(CONFIG_KEY, name) if val != None: return val val", "in target # paths, and also because that's what `.buckconfig` is supposed to", "by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. 
- `rpm_version_set_overrides`: List of `nevra` objects (see", "looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl", "locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path", "a value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\" and rpm_installer !=", "build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name = name, build_appliance = build_appliance, rpm_installer", "Inc. and affiliates. # # This source code is licensed under the MIT", "across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors =", "a # detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't", "lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) != len(lst):", "is determined is *always* based on the build mode # provided, ie `@mode/opt`", "below), and non-configurable constants that are currently not namespaced. # # Note that", "artifact = _get_artifact_key_to_path(), # At FB, the Antlir team tightly controls the usage", "In the future this would be # implemented via a `Shape` so that", "= new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t, # This one is not", "rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\"", "`Shape` so that the typing can be maintained across # bzl/python. 
host_mounts_for_repo_artifacts =", "native.read_config(CONFIG_KEY, name) if val != None: return val val = do_not_use_repo_cfg.get(name) if val", "`buck -c` overrides from the command-line val = native.read_config(CONFIG_KEY, name) if val !=", "an OS image with other image build tools like `btrfs`, `dnf`, `yum`, `tar`,", "the future this would be # implemented via a `Shape` so that the", "avoid a circular # dependency. # # This feature is exposed a non-`None`", "sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default = None): #", "k1 v1 k2 v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config(", "mounts required to execute FB binaries in @mode/dev. # # This is turned", "# Buck has a notion of flavors that is separate from Antlir's but", "False): ret = _do_not_use_directly_get_cfg(name, default = default) if not allow_none and ret ==", "), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config():", "different build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\")", "be fixable if Buck # supported providers like Bazel does. antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\",", "and ret == None: fail(\"Repo config must set key {}\".format(name)) return ret #", "to override this value except to use a different build mode. artifacts_require_repo =", "outside of Antlir internals. See \"Why are `feature`s # forbidden as dependencies?\" in", "`_get_str_cfg` or `_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default = None): # Allow `buck -c`", "one is not using the access methods to provide the precedence order #", "non-struct split, so we # could easily move everything into the struct. 
#", "This is a dictionary that allow for looking up configurable artifact # targets", "# This is turned into json and loaded by the python side of", "use space to separate plurals because spaces are not allowed in target #", "not set. # # We use space to separate plurals because spaces are", "v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer,", "bugs). def _get_str_list_cfg(name, separator = \" \", default = None): s = _do_not_use_directly_get_cfg(name)", "# # This source code is licensed under the MIT license found in", "snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path", "license found in the # LICENSE file in the root directory of this", "def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path)", "And the build mode provided # determines the value of the `.buckconfig` properties", "typing can be maintained across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available", "# Use `_get_str_cfg` or `_get_str_list_cfg` instead. 
def _do_not_use_directly_get_cfg(name, default = None): # Allow", "\"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT use this", "we have to emit as many targets as there are # elements in", "the usage of host mounts, # since they are a huge footgun, and", "{}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy # Apply", "flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), #", "turn off version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING", "== \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") ==", "artifact # targets by a key. artifact = _get_artifact_key_to_path(), # At FB, the", "or \"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance = None if build_appliance: build_appliance", "new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions,", "found in the # LICENSE file in the root directory of this source", "whether `None` refers to \"no BA\" or \"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE:", "rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ),", "= ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and", "required to execute FB binaries in @mode/dev. 
# # This is turned into", "support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or `/__antlir__`", "== None: fail( \"Must be a target path, or a value from `constants.bzl`\",", "configurable artifact # targets by a key. artifact = _get_artifact_key_to_path(), # At FB,", "via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults", "v1 k2 v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name,", "licensed under the MIT license found in the # LICENSE file in the", "is *always* based on the build mode # provided, ie `@mode/opt` vs `@mode/dev`.", "default = None, allow_none = False): ret = _do_not_use_directly_get_cfg(name, default = default) if", "of the `.buckconfig` properties used. There is no # way to override this", "set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 *", "for list configs (but does not, due to bugs). def _get_str_list_cfg(name, separator =", "- `unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if build_appliance == None: fail( \"Must be", "empty list if the config is not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\")", "so that callers # cannot get confused whether `None` refers to \"no BA\"", "return flavor_to_config REPO_CFG = shape.new( repo_config_t, # This one is not using the", "dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions =", "target or `/__antlir__` paths, see `snapshot_install_dir` doc. `None` uses the default determined by", "empty list if the config is not set. 
# # We use space", "= rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config =", "This would be fixable if Buck # supported providers like Bazel does. antlir_linux_flavor", "provide the precedence order # because the way this is determined is *always*", "return ret # Defaults to the empty list if the config is not", "internals. See \"Why are `feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a", "`feature`, we have to emit as many targets as there are # elements", "constant so that callers # cannot get confused whether `None` refers to \"no", "Do NOT use. \"\"\" if build_appliance == None: fail( \"Must be a target", "MIT license found in the # LICENSE file in the root directory of", "\"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\")", "a copy # Apply `buck -c` overrides. # # Buck has a notion", "has a notion of flavors that is separate from Antlir's but # similar", "instead. def _do_not_use_directly_get_cfg(name, default = None): # Allow `buck -c` overrides from the", "val val = do_not_use_repo_cfg.get(name) if val != None: return val return default #", "up configurable artifact # targets by a key. artifact = _get_artifact_key_to_path(), # At", "# # This is turned into json and loaded by the python side", "`RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or `/__antlir__` paths, see `snapshot_install_dir` doc. 
`None`", "See \"Why are `feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a #", "BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT use this outside", "# Do NOT use this outside of Antlir internals. See \"Why are `feature`s", "list if the config is not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path", "manager -- in non-default settings, this has to be chosen per image, since", "is not set. # # We use space to separate plurals because spaces", "- `name`: The name of the flavor - `build_appliance`: Path to a layer", "because we do not know the version set that the # including `image.layer`", "in the root directory of this source tree. # This combines configurable build-time", "in non-default settings, this has to be chosen per image, since a BA", "build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR,", "\"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check =", "know the version set that the # including `image.layer` will use. This would", "# via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path #", "if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1", "lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) != len(lst):", "fixable if Buck # supported providers like Bazel does. 
antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none", "determined is *always* based on the build mode # provided, ie `@mode/opt` vs", "no deep reason for this struct / non-struct split, so we # could", "the default determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of", "# every application. To create an easy-to-review code bottleneck, any # feature target", "that the typing can be maintained across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\",", "This one is not using the access methods to provide the precedence order", "with given name to be installed, the `nevra` defines its version. - `unsafe_bypass_flavor_check`:", "flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate", "rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config", "OS image with other image build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`,", "shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None,", "NOT use. \"\"\" if build_appliance == None: fail( \"Must be a target path,", "DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\",", "this constant to avoid a circular # dependency. 
# # This feature is", "not set a default package manager -- in non-default settings, this has to", "path, or a value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\" and", "REPO_CFG = shape.new( repo_config_t, # This one is not using the access methods", "None: fail(\"Repo config must set key {}\".format(name)) return ret # Defaults to the", "order # because the way this is determined is *always* based on the", "constant to avoid a circular # dependency. # # This feature is exposed", "could easily move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\")", "= \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\",", "the typing can be maintained across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ),", "build_appliance = None if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name =", ") def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead.", "val return default # We don't have \"globally required\" configs because code that", "CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", # Do NOT", "config value that is None. def _get_str_cfg(name, default = None, allow_none = False):", "confused whether `None` refers to \"no BA\" or \"default BA\". if build_appliance ==", "per-flavor # config options, so we follow that pattern. 
config_key = CONFIG_KEY +", "BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`: The name of the flavor", "CONFIG_KEY + \"#\" + flavor for key, v in flavor_config.items(): val = native.read_config(config_key,", "for definition). If rpm with given name to be installed, the `nevra` defines", "flavor_config.update(orig_flavor_config) # we'll mutate a copy # Apply `buck -c` overrides. # #", "`rpm_installer`: The build appliance currently does not set a default package manager --", "= \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target):", "since they are a huge footgun, and are a terrible idea for almost", "delimiter for per-flavor # config options, so we follow that pattern. config_key =", "= normalize_target(build_appliance) return shape.new( flavor_config_t, name = name, build_appliance = build_appliance, rpm_installer =", "# for list configs (but does not, due to bugs). def _get_str_list_cfg(name, separator", "`rpm_repo_snapshot`: List of target or `/__antlir__` paths, see `snapshot_install_dir` doc. `None` uses the", "installed `rpm_repo_snapshot()`, plus an OS image with other image build tools like `btrfs`,", "in spirit. 
It uses # as the delimiter for per-flavor # config options,", "build appliance, containing an installed `rpm_repo_snapshot()`, plus an OS image with other image", "This feature is exposed a non-`None` magic constant so that callers # cannot", "else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check", "= \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix = \"__layer-feature\", #", "code is licensed under the MIT license found in the # LICENSE file", "For each `feature`, we have to emit as many targets as there are", "source tree. # This combines configurable build-time constants (documented on REPO_CFG # below),", "controls the usage of host mounts, # since they are a huge footgun,", "to emit as many targets as there are # elements in this list,", "is licensed under the MIT license found in the # LICENSE file in", "There is no # way to override this value except to use a", "space to separate plurals because spaces are not allowed in target # paths,", "- `rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl for definition). If rpm with", "= dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated", "antlir/bzl/constants.bzl for definition). If rpm with given name to be installed, the `nevra`", "if rpm_installer != \"yum\" and rpm_installer != \"dnf\": fail(\"Unsupported rpm_installer supplied in build_opts\")", "# # Buck has a notion of flavors that is separate from Antlir's", "version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {} for", "override this value except to use a different build mode. 
artifacts_require_repo = (", "is a dictionary that allow for looking up configurable artifact # targets by", "default determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of `nevra`", "this has to be chosen per image, since a BA can support multiple", "root directory of this source tree. # This combines configurable build-time constants (documented", "access methods to provide the precedence order # because the way this is", "targets by a key. artifact = _get_artifact_key_to_path(), # At FB, the Antlir team", "package manager -- in non-default settings, this has to be chosen per image,", "does not, due to bugs). def _get_str_list_cfg(name, separator = \" \", default =", "configurable build-time constants (documented on REPO_CFG # below), and non-configurable constants that are", "default # We don't have \"globally required\" configs because code that requires a", "\"globally required\" configs because code that requires a # config will generally loudly", "that's what `.buckconfig` is supposed to support # for list configs (but does", "rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments - `name`:", "= _do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default or []) # Defaults to", "config options, so we follow that pattern. config_key = CONFIG_KEY + \"#\" +", "as dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions", "not using the access methods to provide the precedence order # because the", "# This one is not using the access methods to provide the precedence", "on the build mode # provided, ie `@mode/opt` vs `@mode/dev`. And the build", "# targets by a key. 
artifact = _get_artifact_key_to_path(), # At FB, the Antlir", "the way this is determined is *always* based on the build mode #", "of the # `nspawn_in_subvol` sub system. In the future this would be #", "use a different build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or", "# determines the value of the `.buckconfig` properties used. There is no #", "and loaded by the python side of the # `nspawn_in_subvol` sub system. In", "= _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path", "properties used. There is no # way to override this value except to", "are # elements in this list, because we do not know the version", "we # could easily move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\")", "return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides =", "the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\",", "list configs (but does not, due to bugs). def _get_str_list_cfg(name, separator = \"", "False): \"\"\" Arguments - `name`: The name of the flavor - `build_appliance`: Path", "ie `@mode/opt` vs `@mode/dev`. And the build mode provided # determines the value", "# elements in this list, because we do not know the version set", "build appliance currently does not set a default package manager -- in non-default", "using the access methods to provide the precedence order # because the way", "would be fixable if Buck # supported providers like Bazel does. 
antlir_linux_flavor =", ") ), rpm_version_set_overrides = rpm_version_set_overrides, version_set_path = version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def", "so we follow that pattern. config_key = CONFIG_KEY + \"#\" + flavor for", "command-line val = native.read_config(CONFIG_KEY, name) if val != None: return val val =", "supplied in build_opts\") # When building the BA itself, we need this constant", "# we'll mutate a copy # Apply `buck -c` overrides. # # Buck", "None: fail( \"Must be a target path, or a value from `constants.bzl`\", \"build_appliance\",", "load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix", "# Defaults to the empty list if the config is not set. #", "fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1 k2 v2\") # A layer can", "rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer,", "requires a # config will generally loudly fail on a config value that", "a BA can support multiple package managers. In the future, if specifying a", "= default) if not allow_none and ret == None: fail(\"Repo config must set", "of the flavor - `build_appliance`: Path to a layer target of a build", "get confused whether `None` refers to \"no BA\" or \"default BA\". if build_appliance", "the `.buckconfig` properties used. There is no # way to override this value", "= _get_str_cfg(\"flavor_default\"), flavor_to_config = _get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. # # For", "= val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t, # This", "paths, see `snapshot_install_dir` doc. 
`None` uses the default determined by looking up `rpm_installer`", "is not using the access methods to provide the precedence order # because", "onerous when using non-default BAs, we could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`.", "vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults to the empty list", "if not allow_none and ret == None: fail(\"Repo config must set key {}\".format(name))", "turned into json and loaded by the python side of the # `nspawn_in_subvol`", "configs (but does not, due to bugs). def _get_str_list_cfg(name, separator = \" \",", "# provided, ie `@mode/opt` vs `@mode/dev`. And the build mode provided # determines", "`dnf`, `yum`, `tar`, `ln`, ... - `rpm_installer`: The build appliance currently does not", "per image proves onerous when using non-default BAs, we could support a `default`", "**kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path", "fail(\"Unsupported rpm_installer supplied in build_opts\") # When building the BA itself, we need", "config is not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2]))", "are currently not namespaced. # # Note that there's no deep reason for", "= BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults to the", "\"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", )", "= name, build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = ( snapshot_install_dir(rpm_repo_snapshot) if", "tree. 
# This combines configurable build-time constants (documented on REPO_CFG # below), and", "if val != None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG", "allow for looking up configurable artifact # targets by a key. artifact =", "# # Note that there's no deep reason for this struct / non-struct", "if s else (default or []) # Defaults to the empty list if", "`.buckconfig` properties used. There is no # way to override this value except", "ret == None: fail(\"Repo config must set key {}\".format(name)) return ret # Defaults", "up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl for", "the # including `image.layer` will use. This would be fixable if Buck #", "{\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy # Apply `buck -c` overrides.", "have \"globally required\" configs because code that requires a # config will generally", "under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or `/__antlir__` paths, see `snapshot_install_dir` doc.", "unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items():", "= False): ret = _do_not_use_directly_get_cfg(name, default = default) if not allow_none and ret", "fail( \"Must be a target path, or a value from `constants.bzl`\", \"build_appliance\", )", "code that requires a # config will generally loudly fail on a config", "mutate a copy # Apply `buck -c` overrides. 
# # Buck has a", "load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE", "implemented via a `Shape` so that the typing can be maintained across #", "# feature target using a host-mount must be listed in this config. host_mounts_allowed_in_targets", "not allow_none and ret == None: fail(\"Repo config must set key {}\".format(name)) return", "_get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\":", "currently does not set a default package manager -- in non-default settings, this", "this would be # implemented via a `Shape` so that the typing can", "a default package manager -- in non-default settings, this has to be chosen", "future this would be # implemented via a `Shape` so that the typing", "= version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {} for flavor,", "use. This would be fixable if Buck # supported providers like Bazel does.", "host-mount must be listed in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host", "= \"__layer-feature\", # Do NOT use this outside of Antlir internals. See \"Why", "Note that there's no deep reason for this struct / non-struct split, so", "# way to override this value except to use a different build mode.", "using non-default BAs, we could support a `default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`:", "for looking up configurable artifact # targets by a key. artifact = _get_artifact_key_to_path(),", "Defaults to the empty list if the config is not set. 
# #", "image build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ... - `rpm_installer`: The", "separate from Antlir's but # similar in spirit. It uses # as the", "build mode provided # determines the value of the `.buckconfig` properties used. There", "`unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if build_appliance == None: fail( \"Must be a", "a host-mount must be listed in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates", "chosen per image, since a BA can support multiple package managers. In the", "would be # implemented via a `Shape` so that the typing can be", "\"flavor_config_t\", \"nevra_t\", \"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY =", "fail(\"Repo config must set key {}\".format(name)) return ret # Defaults to the empty", "# cannot get confused whether `None` refers to \"no BA\" or \"default BA\".", "The name of the flavor - `build_appliance`: Path to a layer target of", "return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead. def _do_not_use_directly_get_cfg(name, default", "k1 v1 k2 v2\") # A layer can turn off version locking #", "build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ... - `rpm_installer`: The build", "detailed explanation. 
PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores.", "from the command-line val = native.read_config(CONFIG_KEY, name) if val != None: return val", "None: return val return default # We don't have \"globally required\" configs because", "are not allowed in target # paths, and also because that's what `.buckconfig`", "empty list if the config is not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\")", "# Enumerates host mounts required to execute FB binaries in @mode/dev. # #", "the config is not set. # # We use space to separate plurals", "= {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy # Apply `buck -c`", "image proves onerous when using non-default BAs, we could support a `default` symlink", "target # paths, and also because that's what `.buckconfig` is supposed to support", "be a target path, or a value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer", "support # for list configs (but does not, due to bugs). def _get_str_list_cfg(name,", "or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", #", "that callers # cannot get confused whether `None` refers to \"no BA\" or", "val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new( repo_config_t, # This one", "configs because code that requires a # config will generally loudly fail on", "to bugs). 
def _get_str_list_cfg(name, separator = \" \", default = None): s =", "= None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default or [])", "settings, this has to be chosen per image, since a BA can support", "None if build_appliance: build_appliance = normalize_target(build_appliance) return shape.new( flavor_config_t, name = name, build_appliance", "val = native.read_config(config_key, key, None) if val != None: flavor_config[key] = val flavor_to_config[flavor]", "hostnames can't contain underscores. hostname_for_compiler_in_ba = \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" +", "not allowed in target # paths, and also because that's what `.buckconfig` is", "s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default or []) # Defaults", "layer can turn off version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] =", "version_set_path, unsafe_bypass_flavor_check = unsafe_bypass_flavor_check, ) def _get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config", "build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") ==", "- `build_appliance`: Path to a layer target of a build appliance, containing an", "= _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path", "\"require_repo\", \"true\") == \"true\", # This is a dictionary that allow for looking", "\"true\") == \"true\", # This is a dictionary that allow for looking up", "`default` symlink under `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_repo_snapshot`: List of target or `/__antlir__` paths, see", "and are a terrible idea for almost # every application. 
To create an", "is no # way to override this value except to use a different", "image, since a BA can support multiple package managers. In the future, if", "!= None: flavor_config[key] = val flavor_to_config[flavor] = new_flavor_config(**flavor_config) return flavor_to_config REPO_CFG = shape.new(", "_get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"), flavor_to_config", "flavor_config_t, name = name, build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot = (", "/ non-struct split, so we # could easily move everything into the struct.", "= None): # Allow `buck -c` overrides from the command-line val = native.read_config(CONFIG_KEY,", "do_not_use_repo_cfg.get(name) if val != None: return val return default # We don't have", "need this constant to avoid a circular # dependency. # # This feature", "mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\")", "= CONFIG_KEY + \"#\" + flavor for key, v in flavor_config.items(): val =", "\"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\",", "by a key. artifact = _get_artifact_key_to_path(), # At FB, the Antlir team tightly", "flavor - `build_appliance`: Path to a layer target of a build appliance, containing", "FB, the Antlir team tightly controls the usage of host mounts, # since", "by the python side of the # `nspawn_in_subvol` sub system. 
In the future", "rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False):", "team tightly controls the usage of host mounts, # since they are a", "containing an installed `rpm_repo_snapshot()`, plus an OS image with other image build tools", "non-default installer per image proves onerous when using non-default BAs, we could support", "LICENSE file in the root directory of this source tree. # This combines", "bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default", "of flavors that is separate from Antlir's but # similar in spirit. It", "config is not set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2]))", "a dictionary that allow for looking up configurable artifact # targets by a", "appliance currently does not set a default package manager -- in non-default settings,", "# Apply `buck -c` overrides. # # Buck has a notion of flavors", "be maintained across # bzl/python. host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"),", "there's no deep reason for this struct / non-struct split, so we #", "# Defaults to the empty list if the config is not set def", "FB binaries in @mode/dev. # # This is turned into json and loaded", "this struct / non-struct split, so we # could easily move everything into", "# including `image.layer` will use. This would be fixable if Buck # supported", "deep reason for this struct / non-struct split, so we # could easily", "version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead. 
def _do_not_use_directly_get_cfg(name,", "# For each `feature`, we have to emit as many targets as there", "is None. def _get_str_cfg(name, default = None, allow_none = False): ret = _do_not_use_directly_get_cfg(name,", "# Allow `buck -c` overrides from the command-line val = native.read_config(CONFIG_KEY, name) if", "don't have \"globally required\" configs because code that requires a # config will", "into json and loaded by the python side of the # `nspawn_in_subvol` sub", "Defaults to the empty list if the config is not set def _get_version_set_to_path():", "for almost # every application. To create an easy-to-review code bottleneck, any #", "orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a", "pattern. config_key = CONFIG_KEY + \"#\" + flavor for key, v in flavor_config.items():", "[]) # Defaults to the empty list if the config is not set", "explanation. PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", # hostnames can't contain underscores. hostname_for_compiler_in_ba", "of host mounts, # since they are a huge footgun, and are a", "into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\",", "= {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config)", "None, rpm_version_set_overrides = None, version_set_path = BZL_CONST.version_set_allow_all_versions, unsafe_bypass_flavor_check = False): \"\"\" Arguments -", "`bzl/image/feature/new.bzl` for a # detailed explanation. 
PRIVATE_feature_suffix = \"_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN\", version_set_allow_all_versions = \"__VERSION_SET_ALLOW_ALL_VERSIONS__\", #", "def _do_not_use_directly_get_cfg(name, default = None): # Allow `buck -c` overrides from the command-line", "`name`: The name of the flavor - `build_appliance`: Path to a layer target", "# load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\", \"bzl_const_t\", \"flavor_config_t\", \"nevra_t\", \"repo_config_t\")", "cannot get confused whether `None` refers to \"no BA\" or \"default BA\". if", "under the MIT license found in the # LICENSE file in the root", "except to use a different build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") ==", "huge footgun, and are a terrible idea for almost # every application. To", "the value of the `.buckconfig` properties used. There is no # way to", "due to bugs). def _get_str_list_cfg(name, separator = \" \", default = None): s", "that are currently not namespaced. # # Note that there's no deep reason", "to use a different build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\")", "magic constant so that callers # cannot get confused whether `None` refers to", "refers to \"no BA\" or \"default BA\". if build_appliance == DO_NOT_USE_BUILD_APPLIANCE: build_appliance =", "DICTIONARY SMALL. # # For each `feature`, we have to emit as many", "in this list, because we do not know the version set that the", "= _get_artifact_key_to_path(), # At FB, the Antlir team tightly controls the usage of", "# LICENSE file in the root directory of this source tree. # This", "side of the # `nspawn_in_subvol` sub system. 
In the future this would be", "_do_not_use_directly_get_cfg(name, default = default) if not allow_none and ret == None: fail(\"Repo config", "set def _get_artifact_key_to_path(): lst = _get_str_list_cfg(\"artifact_key_to_path\") key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 *", "\"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This is a dictionary", "_get_flavor_to_config(), # KEEP THIS DICTIONARY SMALL. # # For each `feature`, we have", "dict: k1 v1 k2 v2\") return key_to_path def new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def", "`@mode/dev`. And the build mode provided # determines the value of the `.buckconfig`", "lst[1::2])) if 2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1", "vs_to_path # Defaults to the empty list if the config is not set", "non-default settings, this has to be chosen per image, since a BA can", "namespaced. # # Note that there's no deep reason for this struct /", "tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ... - `rpm_installer`: The build appliance", "managers. In the future, if specifying a non-default installer per image proves onerous", "flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy # Apply `buck -c` overrides. #", "mode # provided, ie `@mode/opt` vs `@mode/dev`. And the build mode provided #", "`version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults to", "-- in non-default settings, this has to be chosen per image, since a", "# When building the BA itself, we need this constant to avoid a", "everything into the struct. 
# load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\") load(\":constants.shape.bzl\",", "the BA itself, we need this constant to avoid a circular # dependency.", "= ( snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else \"{}/{}\".format( RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR, rpm_installer, ) ), rpm_version_set_overrides =", "= \" \", default = None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s", "len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1 k2 v2\") # A layer", "(c) Meta Platforms, Inc. and affiliates. # # This source code is licensed", "build-time constants (documented on REPO_CFG # below), and non-configurable constants that are currently", "this value except to use a different build mode. artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\",", "options, so we follow that pattern. config_key = CONFIG_KEY + \"#\" + flavor", "target using a host-mount must be listed in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"),", "be listed in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required", "Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is", "list if the config is not set. # # We use space to", "- `rpm_installer`: The build appliance currently does not set a default package manager", "BA itself, we need this constant to avoid a circular # dependency. #", "are a huge footgun, and are a terrible idea for almost # every", "= _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available = _get_str_list_cfg(\"flavor_available\"), stable_flavors = _get_str_list_cfg(\"stable_flavors\"), flavor_default = _get_str_cfg(\"flavor_default\"),", "of this source tree. 
# This combines configurable build-time constants (documented on REPO_CFG", "json and loaded by the python side of the # `nspawn_in_subvol` sub system.", "so we # could easily move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\",", "Arguments - `name`: The name of the flavor - `build_appliance`: Path to a", "Buck has a notion of flavors that is separate from Antlir's but #", "are `feature`s # forbidden as dependencies?\" in `bzl/image/feature/new.bzl` for a # detailed explanation.", "listed in this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to", "\", default = None): s = _do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default", "\"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST = shape.new( bzl_const_t, layer_feature_suffix =", "( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\",", "(documented on REPO_CFG # below), and non-configurable constants that are currently not namespaced.", "_get_str_list_cfg(name, separator = \" \", default = None): s = _do_not_use_directly_get_cfg(name) return s.split(separator)", "2 * len(vs_to_path) != len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1 k2", "KEEP THIS DICTIONARY SMALL. # # For each `feature`, we have to emit", "`btrfs`, `dnf`, `yum`, `tar`, `ln`, ... - `rpm_installer`: The build appliance currently does", "system. In the future this would be # implemented via a `Shape` so", "name to be installed, the `nevra` defines its version. - `unsafe_bypass_flavor_check`: Do NOT", "or `/__antlir__` paths, see `snapshot_install_dir` doc. `None` uses the default determined by looking", "+ sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg` instead. 
def _do_not_use_directly_get_cfg(name, default = None):", "flavor_to_config REPO_CFG = shape.new( repo_config_t, # This one is not using the access", "loudly fail on a config value that is None. def _get_str_cfg(name, default =", "_do_not_use_directly_get_cfg(name) return s.split(separator) if s else (default or []) # Defaults to the", "we follow that pattern. config_key = CONFIG_KEY + \"#\" + flavor for key,", "this list, because we do not know the version set that the #", "== \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\") == \"true\", # This is a", "fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2 v2\") return key_to_path def new_nevra(**kwargs):", "len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2 v2\") return", "`snapshot_install_dir` doc. `None` uses the default determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`.", "`RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`: List of `nevra` objects (see antlir/bzl/constants.bzl for definition). If rpm", "use. \"\"\" if build_appliance == None: fail( \"Must be a target path, or", "because that's what `.buckconfig` is supposed to support # for list configs (but", "what `.buckconfig` is supposed to support # for list configs (but does not,", "!= len(lst): fail(\"antlir.version_set_to_path is a space-separated dict: k1 v1 k2 v2\") # A", "value from `constants.bzl`\", \"build_appliance\", ) if rpm_installer != \"yum\" and rpm_installer != \"dnf\":", "callers # cannot get confused whether `None` refers to \"no BA\" or \"default", "`None` uses the default determined by looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`. - `rpm_version_set_overrides`:", "the build mode # provided, ie `@mode/opt` vs `@mode/dev`. And the build mode", "config will generally loudly fail on a config value that is None. 
def", "the empty list if the config is not set def _get_artifact_key_to_path(): lst =", "as the delimiter for per-flavor # config options, so we follow that pattern.", "way to override this value except to use a different build mode. artifacts_require_repo", "not, due to bugs). def _get_str_list_cfg(name, separator = \" \", default = None):", "a notion of flavors that is separate from Antlir's but # similar in", "from Antlir's but # similar in spirit. It uses # as the delimiter", "version. - `unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if build_appliance == None: fail( \"Must", "precedence order # because the way this is determined is *always* based on", "return shape.new( flavor_config_t, name = name, build_appliance = build_appliance, rpm_installer = rpm_installer, rpm_repo_snapshot", "as there are # elements in this list, because we do not know", "\"repo_config_t\") load(\":snapshot_install_dir.bzl\", \"RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR\", \"snapshot_install_dir\") load(\":target_helpers.bzl\", \"normalize_target\") DO_NOT_USE_BUILD_APPLIANCE = \"__DO_NOT_USE_BUILD_APPLIANCE__\" CONFIG_KEY = \"antlir\" BZL_CONST", "version locking # via `version_set = BZL_CONST.version_set_allow_all_versions`. vs_to_path[BZL_CONST.version_set_allow_all_versions] = \"TROLLING TROLLING TROLLING\" return", "Buck # supported providers like Bazel does. 
antlir_linux_flavor = _get_str_cfg(\"antlir_linux_flavor\", allow_none = True),", "!= None: return val return default # We don't have \"globally required\" configs", "= shape.new( repo_config_t, # This one is not using the access methods to", "create an easy-to-review code bottleneck, any # feature target using a host-mount must", "allowed in target # paths, and also because that's what `.buckconfig` is supposed", "new_nevra(**kwargs): return shape.new(nevra_t, **kwargs) def new_flavor_config( name, build_appliance, rpm_installer, rpm_repo_snapshot = None, rpm_version_set_overrides", "we do not know the version set that the # including `image.layer` will", "do not know the version set that the # including `image.layer` will use.", "fail on a config value that is None. def _get_str_cfg(name, default = None,", "to avoid a circular # dependency. # # This feature is exposed a", "def _get_flavor_to_config(): flavor_to_config = {} for flavor, orig_flavor_config in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config =", "Antlir team tightly controls the usage of host mounts, # since they are", "defines its version. - `unsafe_bypass_flavor_check`: Do NOT use. \"\"\" if build_appliance == None:", "`nevra` objects (see antlir/bzl/constants.bzl for definition). If rpm with given name to be", "way this is determined is *always* based on the build mode # provided,", "= \"TROLLING TROLLING TROLLING\" return vs_to_path # Defaults to the empty list if", "split, so we # could easily move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\",", "that is separate from Antlir's but # similar in spirit. It uses #", "val != None: return val return default # We don't have \"globally required\"", "be installed, the `nevra` defines its version. - `unsafe_bypass_flavor_check`: Do NOT use. 
\"\"\"", "artifacts_require_repo = ( (native.read_config(\"defaults.cxx_library\", \"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") )", "footgun, and are a terrible idea for almost # every application. To create", "each `feature`, we have to emit as many targets as there are #", "to provide the precedence order # because the way this is determined is", "Defaults to the empty list if the config is not set def _get_artifact_key_to_path():", "reason for this struct / non-struct split, so we # could easily move", "key_to_path = dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a", "mode provided # determines the value of the `.buckconfig` properties used. There is", "in do_not_use_repo_cfg.get(\"flavor_to_config\", {}).items(): flavor_config = {\"name\": flavor} flavor_config.update(orig_flavor_config) # we'll mutate a copy", "is exposed a non-`None` magic constant so that callers # cannot get confused", "\"type\") == \"shared\") or (native.read_config(\"python\", \"package_style\") == \"inplace\") ) and native.read_config(\"antlir\", \"require_repo\", \"true\")", "move everything into the struct. # load(\"//antlir/bzl:oss_shim.bzl\", \"config\", \"do_not_use_repo_cfg\") load(\"//antlir/bzl:sha256.bzl\", \"sha256_b64\") load(\"//antlir/bzl:shape.bzl\", \"shape\")", "\"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or `_get_str_list_cfg`", "can be maintained across # bzl/python. 
host_mounts_for_repo_artifacts = _get_str_list_cfg( \"host_mounts_for_repo_artifacts\", ), flavor_available =", "TROLLING TROLLING\" return vs_to_path # Defaults to the empty list if the config", "= \"INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA\", ) def version_set_override_name(current_target): return \"vset-override-\" + sha256_b64(current_target) # Use `_get_str_cfg` or", "dict: k1 v1 k2 v2\") # A layer can turn off version locking", "not set def _get_version_set_to_path(): lst = _get_str_list_cfg(\"version_set_to_path\") vs_to_path = dict(zip(lst[::2], lst[1::2])) if 2", "is a space-separated dict: k1 v1 k2 v2\") return key_to_path def new_nevra(**kwargs): return", "layer target of a build appliance, containing an installed `rpm_repo_snapshot()`, plus an OS", "support multiple package managers. In the future, if specifying a non-default installer per", "not namespaced. # # Note that there's no deep reason for this struct", "this config. host_mounts_allowed_in_targets = _get_str_list_cfg(\"host_mounts_allowed_in_targets\"), # Enumerates host mounts required to execute FB", "= do_not_use_repo_cfg.get(name) if val != None: return val return default # We don't", "dict(zip(lst[::2], lst[1::2])) if 2 * len(key_to_path) != len(lst): fail(\"antlir.artifact_key_to_path is a space-separated dict:" ]
[ "self.conn = self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data):", "sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n' in x else x", "data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def", "sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r')", "self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try:", "self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone()", "for x in sql_list] for sql_item in sql_list: self.insert(sql_item) except Exception as e:", "sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql)", "#-*- coding: UTF-8 -*- import os from dbutils.pooled_db import PooledDB import pymysql class", "错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\"", "self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user = data def set_password(self,data):", "'root' self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num = 5 self.port = 3306", "= f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n' in x else x for", "try: self.cur.execute(sql) data = self.cur.fetchall() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 
错误行:\"+str(e.__traceback__.tb_lineno),'r')", "Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except", "[x.replace('\\n',' ') if '\\n' in x else x for x in sql_list] for", "data def set_password(self,data): self.password = data def set_db(self,data): self.db = data def set_port(self,data):", "in sql_list] for sql_item in sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r')", "as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception", "host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data):", "错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return data except Exception", "set_user(self,data): self.user = data def set_password(self,data): self.password = data def set_db(self,data): self.db =", "错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone()", "self.cur.fetchall() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally:", "错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r')", "def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit()", "= 3306 self.pool = 
PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection()", "prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e:", "dbutils.pooled_db import PooledDB import pymysql class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host =", "= data def set_user(self,data): self.user = data def set_password(self,data): self.password = data def", "def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return data except Exception as e:", "delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def", "except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except", "except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data =", "self.port = data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f)", "= [x.replace('\\n',' ') if '\\n' in x else x for x in sql_list]", "print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n' in x else", "错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\"", "set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as f:", "insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 
错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def", "data = self.cur.fetchall() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else:", "else: pass finally: pass def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return data", "execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list = f.read().split(';')[:-1] sql_list", "'127.0.0.1' self.user = 'root' self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num = 5", "= PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor()", "self.cur.fetchone() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try:", "Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall()", "self.user = data def set_password(self,data): self.password = data def set_db(self,data): self.db = data", "= data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list", "sql_list = [x.replace('\\n',' ') if '\\n' in x else x for x in", "in x else x for x in sql_list] for sql_item in sql_list: self.insert(sql_item)", "') if '\\n' in x else x for x in sql_list] for sql_item", "x else x for x in sql_list] for sql_item in sql_list: self.insert(sql_item) except", "data = self.cur.fetchone() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def", "self.mysql_host = '127.0.0.1' self.user = 'root' self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num", "Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') 
sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except", "user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host", "self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql)", "def set_password(self,data): self.password = data def set_db(self,data): self.db = data def set_port(self,data): self.port", "self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql)", "prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\"", "= 5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port)", "data def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8')", "as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return", "try: self.cur.execute(sql) data = self.cur.fetchone() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r')", "e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return data", "self.db = 'tushare' self.conn_num = 5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host,", "self.password = data def set_db(self,data): self.db = data def 
set_port(self,data): self.port = data", "'<PASSWORD>' self.db = 'tushare' self.conn_num = 5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num,", "5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn", "self.db = data def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding", "select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\"", "os from dbutils.pooled_db import PooledDB import pymysql class Mysql(object): 'Mysql element' def __init__(self):", "as f: print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n' in", "try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql):", "prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def select_one(self,sql): try: self.cur.execute(sql) data =", "def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user = data def set_password(self,data): self.password", "'\\n' in x else x for x in sql_list] for sql_item in sql_list:", "def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as", "x for x in sql_list] for sql_item in sql_list: self.insert(sql_item) except Exception as", "except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def select_one(self,sql):", "f: print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n' in x", "open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' 
')", "data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone()", "x in sql_list] for sql_item in sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\"", "sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r')", "data def set_user(self,data): self.user = data def set_password(self,data): self.password = data def set_db(self,data):", "= data def set_password(self,data): self.password = data def set_db(self,data): self.db = data def", "data def set_db(self,data): self.db = data def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path):", "= self.cur.fetchall() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass", "except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit()", "if '\\n' in x else x for x in sql_list] for sql_item in", "f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n' in x else x for x", "from dbutils.pooled_db import PooledDB import pymysql class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host", "coding: UTF-8 -*- import os from dbutils.pooled_db import PooledDB import pymysql class Mysql(object):", "import PooledDB import pymysql class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1'", "= data def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding =", "sql_item in sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def 
select(self,sql):", "as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def select_one(self,sql): try: self.cur.execute(sql)", "class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1' self.user = 'root' self.password", "sql_list] for sql_item in sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit()", "sys.exit() else: pass finally: pass def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return", "= self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user", "= 'root' self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num = 5 self.port =", "import os from dbutils.pooled_db import PooledDB import pymysql class Mysql(object): 'Mysql element' def", "3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur", "pass finally: pass def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return data except", "self.cur.execute(sql) data = self.cur.fetchone() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit()", "PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor() def", "def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return data except Exception as e:", "except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit()", "self.user = 'root' self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num = 
5 self.port", "finally: pass def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return data except Exception", "#!/usr/bin/python #-*- coding: UTF-8 -*- import os from dbutils.pooled_db import PooledDB import pymysql", "select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\"", "sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit()", "self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num = 5 self.port = 3306 self.pool", "as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception", "Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def select_one(self,sql): try:", "self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user =", "self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try:", "= self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user = data def", "'utf8') as f: print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if '\\n'", "= 'utf8') as f: print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n',' ') if", "passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host =", "sys.exit() def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return data except Exception as", "return data except 
Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass", "in sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try:", "data def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list =", "pymysql class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1' self.user = 'root'", "else x for x in sql_list] for sql_item in sql_list: self.insert(sql_item) except Exception", "Mysql(object): 'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1' self.user = 'root' self.password =", "'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1' self.user = 'root' self.password = '<PASSWORD>'", "with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list = f.read().split(';')[:-1] sql_list = [x.replace('\\n','", "try: with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list = f.read().split(';')[:-1] sql_list =", "as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except Exception as", "<reponame>huangxl-github/base-tushare-stock<filename>btstock/db/db_mysql.py<gh_stars>1-10 #!/usr/bin/python #-*- coding: UTF-8 -*- import os from dbutils.pooled_db import PooledDB import", "try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql):", "e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def delete(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as", "UTF-8 -*- import os from dbutils.pooled_db import PooledDB import pymysql class Mysql(object): 'Mysql", "e: 
prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() else: pass finally: pass def select_one(self,sql): try: self.cur.execute(sql) data", "def execute_sql_file(self,sql_file_path): try: with open(sql_file_path,'r+',encoding = 'utf8') as f: print(f) sql_list = f.read().split(';')[:-1]", "PooledDB import pymysql class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1' self.user", "self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data", "= '<PASSWORD>' self.db = 'tushare' self.conn_num = 5 self.port = 3306 self.pool =", "pass def select_one(self,sql): try: self.cur.execute(sql) data = self.cur.fetchone() return data except Exception as", "self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user = data", "= self.cur.fetchone() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql):", "return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql)", "prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e:", "Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except Exception", "def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit()", "e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def insert(self,sql): try: self.cur.execute(sql) self.cur.fetchone() 
self.conn.commit() except Exception as", "self.mysql_host = data def set_user(self,data): self.user = data def set_password(self,data): self.password = data", "prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def select(self,sql): try: self.cur.execute(sql) data = self.cur.fetchall() return data except", "= data def set_db(self,data): self.db = data def set_port(self,data): self.port = data def", "self.conn.commit() except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit()", "db=self.db, port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data", "def set_user(self,data): self.user = data def set_password(self,data): self.password = data def set_db(self,data): self.db", "self.conn_num = 5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db,", "port=self.port) self.conn = self.pool.connection() self.cur = self.conn.cursor() def set_mysql_host(self,data): self.mysql_host = data def", "def __init__(self): self.mysql_host = '127.0.0.1' self.user = 'root' self.password = '<PASSWORD>' self.db =", "'tushare' self.conn_num = 5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password,", "self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn =", "self.cur.execute(sql) data = self.cur.fetchall() return data except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit()", "element' def __init__(self): self.mysql_host = '127.0.0.1' self.user = 'root' self.password = '<PASSWORD>' self.db", "= 'tushare' self.conn_num = 5 self.port = 3306 self.pool = PooledDB(pymysql,self.conn_num, 
host=self.mysql_host, user=self.user,", "set_mysql_host(self,data): self.mysql_host = data def set_user(self,data): self.user = data def set_password(self,data): self.password =", "import pymysql class Mysql(object): 'Mysql element' def __init__(self): self.mysql_host = '127.0.0.1' self.user =", "self.pool = PooledDB(pymysql,self.conn_num, host=self.mysql_host, user=self.user, passwd=self.password, db=self.db, port=self.port) self.conn = self.pool.connection() self.cur =", "= '127.0.0.1' self.user = 'root' self.password = '<PASSWORD>' self.db = 'tushare' self.conn_num =", "set_db(self,data): self.db = data def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try: with", "-*- import os from dbutils.pooled_db import PooledDB import pymysql class Mysql(object): 'Mysql element'", "e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def update(self,sql): try: self.cur.execute(sql) self.conn.commit() except Exception as e:", "def set_db(self,data): self.db = data def set_port(self,data): self.port = data def execute_sql_file(self,sql_file_path): try:", "for sql_item in sql_list: self.insert(sql_item) except Exception as e: prcc(\"错误描述:\"+str(e)+\" 错误行:\"+str(e.__traceback__.tb_lineno),'r') sys.exit() def", "set_password(self,data): self.password = data def set_db(self,data): self.db = data def set_port(self,data): self.port =", "__init__(self): self.mysql_host = '127.0.0.1' self.user = 'root' self.password = '<PASSWORD>' self.db = 'tushare'" ]
[ "= argv[1] if len(argv) > 1 else 1000 avc_first = get_output(\"./avctest/avctest %s 1\"", "check_output(command.split()).split()) tests = argv[1] if len(argv) > 1 else 1000 avc_first = get_output(\"./avctest/avctest", "%s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other =", "> 1 else 1000 avc_first = get_output(\"./avctest/avctest %s 1\" % tests) avc_other =", "AVC ===\" print \"first: %f (std. %f)\" % (mean_avc_first, std_avc_first) print \"other: %f", "(std. %f)\" % (mean_uavc_first, std_uavc_first) print \"other: %f (std. %f)\" % (mean_uavc_other, std_uavc_other)", "= np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first =", "get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1] if len(argv) > 1 else 1000", "avc_other = get_output(\"./avctest/avctest %s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" %", "std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC ===\" print \"first: %f", "np from sys import argv def get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1]", "get_output(\"./avctest/avctest %s 1\" % tests) avc_other = get_output(\"./avctest/avctest %s 0\" % tests) uavc_first", "\"first: %f (std. %f)\" % (mean_avc_first, std_avc_first) print \"other: %f (std. %f)\" %", "np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other)", "%f (std. %f)\" % (mean_avc_first, std_avc_first) print \"other: %f (std. 
%f)\" % (mean_avc_other,", "uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" %", "get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first =", "1000 avc_first = get_output(\"./avctest/avctest %s 1\" % tests) avc_other = get_output(\"./avctest/avctest %s 0\"", "print \"first: %f (std. %f)\" % (mean_avc_first, std_avc_first) print \"other: %f (std. %f)\"", "% tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other =", "else 1000 avc_first = get_output(\"./avctest/avctest %s 1\" % tests) avc_other = get_output(\"./avctest/avctest %s", "1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first = np.mean(avc_first)", "0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other", "tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other =", "%s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first =", "\"other: %f (std. %f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC ===\" print \"first:", "len(argv) > 1 else 1000 avc_first = get_output(\"./avctest/avctest %s 1\" % tests) avc_other", "% tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other", "np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first)", "= np.std(uavc_other) print \"=== AVC ===\" print \"first: %f (std. %f)\" % (mean_avc_first,", "np.std(uavc_other) print \"=== AVC ===\" print \"first: %f (std. %f)\" % (mean_avc_first, std_avc_first)", "%f (std. 
%f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC ===\" print \"first: %f", "%f)\" % (mean_avc_first, std_avc_first) print \"other: %f (std. %f)\" % (mean_avc_other, std_avc_other) print", "(mean_avc_first, std_avc_first) print \"other: %f (std. %f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC", "std_avc_first) print \"other: %f (std. %f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC ===\"", "% (mean_avc_other, std_avc_other) print \"=== uAVC ===\" print \"first: %f (std. %f)\" %", "get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first", "= get_output(\"./avctest/avctest %s 1\" % tests) avc_other = get_output(\"./avctest/avctest %s 0\" % tests)", "import numpy as np from sys import argv def get_output(command): return map(int, check_output(command.split()).split())", "tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\"", "= get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests)", "sys import argv def get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1] if len(argv)", "tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other)", "tests = argv[1] if len(argv) > 1 else 1000 avc_first = get_output(\"./avctest/avctest %s", "return map(int, check_output(command.split()).split()) tests = argv[1] if len(argv) > 1 else 1000 avc_first", "= get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first", "= np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first =", "\"=== uAVC ===\" print \"first: %f (std. 
%f)\" % (mean_uavc_first, std_uavc_first) print \"other:", "===\" print \"first: %f (std. %f)\" % (mean_uavc_first, std_uavc_first) print \"other: %f (std.", "avc_first = get_output(\"./avctest/avctest %s 1\" % tests) avc_other = get_output(\"./avctest/avctest %s 0\" %", "argv def get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1] if len(argv) > 1", "if len(argv) > 1 else 1000 avc_first = get_output(\"./avctest/avctest %s 1\" % tests)", "tests) avc_other = get_output(\"./avctest/avctest %s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\"", "#!/usr/bin/env python from subprocess import check_output import numpy as np from sys import", "===\" print \"first: %f (std. %f)\" % (mean_avc_first, std_avc_first) print \"other: %f (std.", "1\" % tests) avc_other = get_output(\"./avctest/avctest %s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest", "0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest", "= np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other =", "np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC", "print \"=== AVC ===\" print \"first: %f (std. 
%f)\" % (mean_avc_first, std_avc_first) print", "= np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"===", "%s 1\" % tests) avc_other = get_output(\"./avctest/avctest %s 0\" % tests) uavc_first =", "np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC ===\" print \"first:", "= np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other =", "uavc_other = get_output(\"./uavctest/uavctest %s 0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other)", "std_uavc_other = np.std(uavc_other) print \"=== AVC ===\" print \"first: %f (std. %f)\" %", "print \"=== uAVC ===\" print \"first: %f (std. %f)\" % (mean_uavc_first, std_uavc_first) print", "%s 0\" % tests) mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first)", "mean_avc_first = np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first", "std_avc_other) print \"=== uAVC ===\" print \"first: %f (std. %f)\" % (mean_uavc_first, std_uavc_first)", "from subprocess import check_output import numpy as np from sys import argv def", "print \"other: %f (std. %f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC ===\" print", "%f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC ===\" print \"first: %f (std. %f)\"", "(mean_avc_other, std_avc_other) print \"=== uAVC ===\" print \"first: %f (std. %f)\" % (mean_uavc_first,", "argv[1] if len(argv) > 1 else 1000 avc_first = get_output(\"./avctest/avctest %s 1\" %", "= np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC ===\" print \"first: %f (std.", "print \"first: %f (std. %f)\" % (mean_uavc_first, std_uavc_first) print \"other: %f (std. 
%f)\"", "import argv def get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1] if len(argv) >", "mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other", "\"first: %f (std. %f)\" % (mean_uavc_first, std_uavc_first) print \"other: %f (std. %f)\" %", "np.mean(avc_first) mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first)", "subprocess import check_output import numpy as np from sys import argv def get_output(command):", "check_output import numpy as np from sys import argv def get_output(command): return map(int,", "% tests) avc_other = get_output(\"./avctest/avctest %s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s", "numpy as np from sys import argv def get_output(command): return map(int, check_output(command.split()).split()) tests", "= np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC ===\" print", "get_output(\"./avctest/avctest %s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other", "(std. %f)\" % (mean_avc_first, std_avc_first) print \"other: %f (std. %f)\" % (mean_avc_other, std_avc_other)", "%f (std. %f)\" % (mean_uavc_first, std_uavc_first) print \"other: %f (std. %f)\" % (mean_uavc_other,", "np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC ===\" print \"first: %f (std. %f)\"", "\"=== AVC ===\" print \"first: %f (std. %f)\" % (mean_avc_first, std_avc_first) print \"other:", "= get_output(\"./avctest/avctest %s 0\" % tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests)", "uAVC ===\" print \"first: %f (std. 
%f)\" % (mean_uavc_first, std_uavc_first) print \"other: %f", "from sys import argv def get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1] if", "std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print", "import check_output import numpy as np from sys import argv def get_output(command): return", "def get_output(command): return map(int, check_output(command.split()).split()) tests = argv[1] if len(argv) > 1 else", "mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first", "std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other) print \"=== AVC ===\"", "(std. %f)\" % (mean_avc_other, std_avc_other) print \"=== uAVC ===\" print \"first: %f (std.", "% tests) uavc_first = get_output(\"./uavctest/uavctest %s 1\" % tests) uavc_other = get_output(\"./uavctest/uavctest %s", "1 else 1000 avc_first = get_output(\"./avctest/avctest %s 1\" % tests) avc_other = get_output(\"./avctest/avctest", "mean_avc_other = np.mean(avc_other) mean_uavc_first = np.mean(uavc_first) mean_uavc_other = np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other", "% (mean_avc_first, std_avc_first) print \"other: %f (std. %f)\" % (mean_avc_other, std_avc_other) print \"===", "np.mean(uavc_other) std_avc_first = np.std(avc_first) std_avc_other = np.std(avc_other) std_uavc_first = np.std(uavc_first) std_uavc_other = np.std(uavc_other)", "as np from sys import argv def get_output(command): return map(int, check_output(command.split()).split()) tests =", "python from subprocess import check_output import numpy as np from sys import argv", "map(int, check_output(command.split()).split()) tests = argv[1] if len(argv) > 1 else 1000 avc_first =" ]
[ "from __future__ import print_function, division from .core import do_one, exhaust, switch def typed(ruletypes):", "rules based on the expression type inputs: ruletypes -- a dict mapping {Type:", "exhaust, switch def typed(ruletypes): \"\"\"Apply rules based on the expression type inputs: ruletypes", "division from .core import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules based on", "do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules based on the expression type inputs:", "based on the expression type inputs: ruletypes -- a dict mapping {Type: rule}", "switch def typed(ruletypes): \"\"\"Apply rules based on the expression type inputs: ruletypes --", "<reponame>logpy/strategies from __future__ import print_function, division from .core import do_one, exhaust, switch def", "typed(ruletypes): \"\"\"Apply rules based on the expression type inputs: ruletypes -- a dict", "import print_function, division from .core import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules", "type inputs: ruletypes -- a dict mapping {Type: rule} \"\"\" return switch(type, ruletypes)", "import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules based on the expression type", "expression type inputs: ruletypes -- a dict mapping {Type: rule} \"\"\" return switch(type,", "\"\"\"Apply rules based on the expression type inputs: ruletypes -- a dict mapping", "print_function, division from .core import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules based", "the expression type inputs: ruletypes -- a dict mapping {Type: rule} \"\"\" return", ".core import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules based on the expression", "from .core import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply rules based on the", "__future__ import print_function, division from .core import do_one, exhaust, switch def typed(ruletypes): \"\"\"Apply", "def typed(ruletypes): \"\"\"Apply rules based on the expression 
type inputs: ruletypes -- a", "on the expression type inputs: ruletypes -- a dict mapping {Type: rule} \"\"\"" ]
[ "number of bathrooms of your property: \")) predicted_price = lr.predict([[input_distance, input_bedrooms, input_bathrooms]]) print(round(predicted_price[0],2))", "used to store trained model import pickle # Open trained model and assigned", "pickle.load(file) # Predict price based on console input, use for debugging input_distance =", "variable with open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file) # Predict price based", "# Open trained model and assigned to variable with open('property_model_Bristle.pickle', 'rb') as file:", "train station: \")) input_bedrooms = int(input(\"Please input the number of bedrooms of your", "trained model and assigned to variable with open('property_model_Bristle.pickle', 'rb') as file: lr =", "model and assigned to variable with open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file)", "input the number of bathrooms of your property: \")) predicted_price = lr.predict([[input_distance, input_bedrooms,", "the distance to the train station: \")) input_bedrooms = int(input(\"Please input the number", "with open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file) # Predict price based on", "open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file) # Predict price based on console", "distance to the train station: \")) input_bedrooms = int(input(\"Please input the number of", "'rb') as file: lr = pickle.load(file) # Predict price based on console input,", "on console input, use for debugging input_distance = float(input(\"Please enter the distance to", "of bedrooms of your property: \")) input_bathrooms = int(input(\"Please input the number of", "input, use for debugging input_distance = float(input(\"Please enter the distance to the train", "to the train station: \")) input_bedrooms = int(input(\"Please input the number of bedrooms", "Predict price based on console input, use for debugging input_distance = float(input(\"Please enter", "model import 
pickle # Open trained model and assigned to variable with open('property_model_Bristle.pickle',", "= pickle.load(file) # Predict price based on console input, use for debugging input_distance", "\")) input_bathrooms = int(input(\"Please input the number of bathrooms of your property: \"))", "pickle # Open trained model and assigned to variable with open('property_model_Bristle.pickle', 'rb') as", "bedrooms of your property: \")) input_bathrooms = int(input(\"Please input the number of bathrooms", "of your property: \")) input_bathrooms = int(input(\"Please input the number of bathrooms of", "import pickle # Open trained model and assigned to variable with open('property_model_Bristle.pickle', 'rb')", "property: \")) input_bathrooms = int(input(\"Please input the number of bathrooms of your property:", "input_bedrooms = int(input(\"Please input the number of bedrooms of your property: \")) input_bathrooms", "the number of bathrooms of your property: \")) predicted_price = lr.predict([[input_distance, input_bedrooms, input_bathrooms]])", "your property: \")) input_bathrooms = int(input(\"Please input the number of bathrooms of your", "= int(input(\"Please input the number of bedrooms of your property: \")) input_bathrooms =", "lr = pickle.load(file) # Predict price based on console input, use for debugging", "trained model import pickle # Open trained model and assigned to variable with", "and assigned to variable with open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file) #", "as file: lr = pickle.load(file) # Predict price based on console input, use", "enter the distance to the train station: \")) input_bedrooms = int(input(\"Please input the", "store trained model import pickle # Open trained model and assigned to variable", "use for debugging input_distance = float(input(\"Please enter the distance to the train station:", "float(input(\"Please enter the distance to the train station: \")) input_bedrooms = int(input(\"Please input", "file: lr 
= pickle.load(file) # Predict price based on console input, use for", "library used to store trained model import pickle # Open trained model and", "assigned to variable with open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file) # Predict", "based on console input, use for debugging input_distance = float(input(\"Please enter the distance", "input the number of bedrooms of your property: \")) input_bathrooms = int(input(\"Please input", "= float(input(\"Please enter the distance to the train station: \")) input_bedrooms = int(input(\"Please", "to store trained model import pickle # Open trained model and assigned to", "debugging input_distance = float(input(\"Please enter the distance to the train station: \")) input_bedrooms", "int(input(\"Please input the number of bathrooms of your property: \")) predicted_price = lr.predict([[input_distance,", "Imports pickle library used to store trained model import pickle # Open trained", "# Predict price based on console input, use for debugging input_distance = float(input(\"Please", "pickle library used to store trained model import pickle # Open trained model", "station: \")) input_bedrooms = int(input(\"Please input the number of bedrooms of your property:", "console input, use for debugging input_distance = float(input(\"Please enter the distance to the", "the train station: \")) input_bedrooms = int(input(\"Please input the number of bedrooms of", "\")) input_bedrooms = int(input(\"Please input the number of bedrooms of your property: \"))", "int(input(\"Please input the number of bedrooms of your property: \")) input_bathrooms = int(input(\"Please", "for debugging input_distance = float(input(\"Please enter the distance to the train station: \"))", "number of bedrooms of your property: \")) input_bathrooms = int(input(\"Please input the number", "# Imports pickle library used to store trained model import pickle # Open", "input_bathrooms = int(input(\"Please input the number of bathrooms of your 
property: \")) predicted_price", "price based on console input, use for debugging input_distance = float(input(\"Please enter the", "= int(input(\"Please input the number of bathrooms of your property: \")) predicted_price =", "input_distance = float(input(\"Please enter the distance to the train station: \")) input_bedrooms =", "Open trained model and assigned to variable with open('property_model_Bristle.pickle', 'rb') as file: lr", "the number of bedrooms of your property: \")) input_bathrooms = int(input(\"Please input the", "to variable with open('property_model_Bristle.pickle', 'rb') as file: lr = pickle.load(file) # Predict price" ]
[ "EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options", "browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try: # wait for all the JSON", "and action mapping from AWS docs import json import time from selenium import", "actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type = str(fields[0].text.replace('*', '')) if", "= webdriver.Opera(options = options, executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path", "webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try:", "webdriver.Firefox(options = options, executable_path = './geckodriver') # open the general page listing the", "expected_conditions as EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options", "type, scrape it and its ARN format if len(tables) > 1: for resource", "# wait until page has fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights')))", "# get list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services", "and its ARN format if len(tables) > 1: for resource in resources: fields", "# get IAM service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table')", "scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: # get IAM service name and", "service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables) >", "except TimeoutError: pass else: # get IAM service name and tables namespace =", "'highlights'))) except: pass actions_json = {} resources_json = {} # get list of", "# store resource type -> actions mapping for action in actions: fields =", "table is the list of resource types if len(tables) > 1: resources =", "services and scrape their tables for row in rows: a_path = row.find_elements_by_tag_name('a')[0] url", "Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options options = Options() options.add_argument('--headless')", "a URL to a IAM service's actions, resources, and condition keys list and", "list and scrapes the tables def get_tables(url): # browser = webdriver.Chrome(options = options)", "namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif len(fields) > 3: resource_type = str(fields[3].text.replace('*',", "a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to files file = open('actions.json', 'w')", "len(fields) == 3: resource_type = str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else:", "pass else: # get IAM service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables", "browser.find_elements_by_tag_name('table') if len(tables) > 0: # first table is the list of actions", "str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: 
namespace_json[resource_type] = [previous_name] elif len(fields)", "str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]', '') action_name = action_name.lower() if resource_type", "= dict() if len(tables) > 0: previous_name = '' # store resource type", "= [action_name] previous_name = action_name # save the constraints actions_json[namespace] = namespace_json namespace_json", "the JSON elements to load before scraping. wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else:", "mapping for action in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type", "services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape their tables for", "'awsdocs-view'))) except TimeoutError: pass else: # get IAM service name and tables namespace", "all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json = {} resources_json =", "3: resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]', '') action_name =", "# browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options = options, executable_path", "if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables)", "their tables for row in rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url)", "keys for all IAM services. 
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10)", "= browser.find_elements_by_tag_name('table') if len(tables) > 0: # first table is the list of", "list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape", "a resource type, scrape it and its ARN format if len(tables) > 1:", "for row in rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit()", "row in rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() #", "get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to files file = open('actions.json', 'w') file.write(json.dumps(actions_json,", "table is the list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table", "options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL to a IAM service's actions, resources,", "of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape their", "elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json = {} resources_json = {} # get", "browser.quit() # dump constraints to files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close()", "second table is the list of resource types if len(tables) > 1: resources", "get IAM service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if", "options, executable_path = './geckodriver') # open the general 
page listing the actions, resource", "and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables) > 0: #", "'./geckodriver') # open the general page listing the actions, resource types, and condition", "browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape their tables for row in rows:", "[permission only]', '') action_name = action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type]", "else: # get IAM service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables =", "> 3: resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]', '') action_name", "3: resource_type = str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] =", "resource_type = str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name]", "= './geckodriver') # open the general page listing the actions, resource types, and", "10) try: # wait for all the JSON elements to load before scraping.", "if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name = action_name #", "executable_path = './geckodriver') # open the general page listing the actions, resource types,", "# second table is the list of resource types if len(tables) > 1:", "resources, and condition keys list and scrapes the tables def get_tables(url): # browser", "import webdriver from selenium.webdriver.common.by import By from 
selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import", "all the JSON elements to load before scraping. wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass", "namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name = action_name # save the constraints actions_json[namespace]", "save the constraints actions_json[namespace] = namespace_json namespace_json = dict() #if there is a", "= webdriver.Chrome(options = options) # browser = webdriver.Opera(options = options, executable_path = './operadriver')", "resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables) > 0: previous_name =", "takes in a URL to a IAM service's actions, resources, and condition keys", "from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from", "to load before scraping. wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: # get IAM", "first table is the list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second", "resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text # save", "browser = webdriver.Firefox(options = options, executable_path = './geckodriver') # open the general page", "options) # browser = webdriver.Opera(options = options, executable_path = './operadriver') browser = webdriver.Firefox(options", "import time from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import", "for all the JSON elements to load before scraping. wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError:", "IAM services. 
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: # wait", "webdriver.Opera(options = options, executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path =", "1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables) > 0: previous_name", "# open the general page listing the actions, resource types, and condition keys", "is a resource type, scrape it and its ARN format if len(tables) >", "WebDriverWait(browser, 10) try: # wait until page has fully loaded all the JSON", "selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options options =", "WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options", "wait for all the JSON elements to load before scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except", "scrape their tables for row in rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href')", "scrapes the tables def get_tables(url): # browser = webdriver.Chrome(options = options) # browser", "the general page listing the actions, resource types, and condition keys for all", "tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables) > 0: previous_name = '' #", "a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to", "action_name # save the constraints actions_json[namespace] = namespace_json namespace_json = dict() #if there", "namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables) > 0: # first table", "namespace_json = dict() if len(tables) > 0: previous_name = '' # store resource", "from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu')", "fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json = {}", "tables = browser.find_elements_by_tag_name('table') if len(tables) > 0: # first table is the list", "dump constraints to files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file =", "= action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name =", "actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the list of resource", "loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 
'highlights'))) except: pass actions_json = {} resources_json", "= webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10)", "time from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait", "0: # first table is the list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr')", "= list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text # save the constraints", "executable_path = './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try: # wait for", "get list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and", "files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file = open('resources.json', 'w') file.write(json.dumps(resources_json,", "Options from selenium.webdriver.firefox.options import Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in", "actions_json = {} resources_json = {} # get list of all services rows", "from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options import", "= {} resources_json = {} # get list of all services rows =", "= Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL to a IAM service's", "iterate through services and scrape their tables for row in rows: a_path =", "scrapes AWS resource type and action mapping from AWS docs import json import", "0: previous_name = '' # store resource type -> actions mapping for action", "browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = 
browser.find_elements_by_tag_name('table') if len(tables) > 0: # first table is the", "listing the actions, resource types, and condition keys for all IAM services. aws_reference", "is the list of resource types if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME,", "its ARN format if len(tables) > 1: for resource in resources: fields =", "= {} # get list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate", "selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support", "= tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the list of resource types if", "open the general page listing the actions, resource types, and condition keys for", "wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json = {} resources_json = {} # get list", "save the constraints resources_json[namespace] = namespace_json finally: browser.close() # browser = webdriver.Chrome(options =", "# save the constraints actions_json[namespace] = namespace_json namespace_json = dict() #if there is", "time.sleep(5) wait = WebDriverWait(browser, 10) try: # wait for all the JSON elements", "'tr') # second table is the list of resource types if len(tables) >", "fields[0].text.replace(' [permission only]', '') action_name = action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else:", "import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from", "-> actions mapping for action in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) ==", "actions, resource types, and condition keys for all IAM services. 
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html'", "in a URL to a IAM service's actions, resources, and condition keys list", "# iterate through services and scrape their tables for row in rows: a_path", "[previous_name] elif len(fields) > 3: resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission", "until page has fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass", "= webdriver.Firefox(options = options, executable_path = './geckodriver') # open the general page listing", "'./operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait =", "actions, resources, and condition keys list and scrapes the tables def get_tables(url): #", "if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif len(fields) > 3:", "def get_tables(url): # browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options =", "before scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: # get IAM service name", "tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables) > 0: # first", "= str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]', '') action_name = action_name.lower() if", "of resource types if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json =", "# first table is the list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') #", "= './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try: # wait for all", "IAM service's actions, resources, and condition keys list and scrapes the tables def", "a IAM service's actions, resources, and condition keys list and scrapes the tables", "row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to files file", "> 1: namespace_json[fields[0].text] = fields[1].text # save the constraints resources_json[namespace] = namespace_json finally:", "# takes in a URL to a IAM service's actions, resources, and condition", "types, and condition keys for all IAM services. 
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait", "import expected_conditions as EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options from", "print('{}...done'.format(url)) browser.quit() # dump constraints to files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4))", "AWS docs import json import time from selenium import webdriver from selenium.webdriver.common.by import", "dict() if len(tables) > 0: previous_name = '' # store resource type ->", "action_name = fields[0].text.replace(' [permission only]', '') action_name = action_name.lower() if resource_type in namespace_json:", "> 0: previous_name = '' # store resource type -> actions mapping for", "= './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait", "list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the list", "type -> actions mapping for action in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields)", "'')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif len(fields) >", "only]', '') action_name = action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] =", "= fields[1].text # save the constraints resources_json[namespace] = namespace_json finally: browser.close() # browser", "= options, executable_path = './geckodriver') # open the general page listing the actions,", "browser = webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser,", "import Options from selenium.webdriver.firefox.options import Options options = 
Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes", "== 3: resource_type = str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type]", "wait = WebDriverWait(browser, 10) try: # wait until page has fully loaded all", "types if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if", "list of resource types if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json", "= row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to files", "= options, executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver')", "wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: # get IAM service name and tables", "the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json = {} resources_json = {}", "len(fields) > 1: namespace_json[fields[0].text] = fields[1].text # save the constraints resources_json[namespace] = namespace_json", "executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url) time.sleep(5)", "constraints to files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file = open('resources.json',", "list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text # save the constraints resources_json[namespace]", "name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables) > 0:", "options = Options() 
options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL to a IAM", "[action_name] previous_name = action_name # save the constraints actions_json[namespace] = namespace_json namespace_json =", "selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options", "namespace_json = dict() #if there is a resource type, scrape it and its", "WebDriverWait(browser, 10) try: # wait for all the JSON elements to load before", "file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file = open('resources.json', 'w') file.write(json.dumps(resources_json, indent=4))", "selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options import Options", "the actions, resource types, and condition keys for all IAM services. aws_reference =", "wait = WebDriverWait(browser, 10) try: # wait for all the JSON elements to", "elif len(fields) > 3: resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]',", "resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif len(fields) > 3: resource_type", "# wait for all the JSON elements to load before scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view')))", "page has fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json", "the list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the", "len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables) >", "options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try: # wait", "= fields[0].text.replace(' [permission only]', '') action_name = action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name)", "fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type = str(fields[0].text.replace('*', '')) if resource_type", "and condition keys for all IAM services. aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait =", "services. 
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: # wait until", "dict() #if there is a resource type, scrape it and its ARN format", "1: namespace_json[fields[0].text] = fields[1].text # save the constraints resources_json[namespace] = namespace_json finally: browser.close()", "webdriver.Chrome(options = options) # browser = webdriver.Opera(options = options, executable_path = './operadriver') browser", "in rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump", "tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the list of resource types if len(tables)", "of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the list of", "namespace_json[resource_type] = [previous_name] elif len(fields) > 3: resource_type = str(fields[3].text.replace('*', '')) action_name =", "if len(tables) > 0: # first table is the list of actions actions", "By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options", "it and its ARN format if len(tables) > 1: for resource in resources:", "> 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables) > 0:", "import Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options options = Options()", "browser.close() # browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options = options,", "wait until page has fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except:", "from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support 
import expected_conditions as", "namespace_json[fields[0].text] = fields[1].text # save the constraints resources_json[namespace] = namespace_json finally: browser.close() #", "browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options = options, executable_path =", "executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') # open", "> 0: # first table is the list of actions actions = tables[0].find_elements(By.TAG_NAME,", "condition keys list and scrapes the tables def get_tables(url): # browser = webdriver.Chrome(options", "#if there is a resource type, scrape it and its ARN format if", "ARN format if len(tables) > 1: for resource in resources: fields = list(resource.find_elements_by_tag_name('td'))", "namespace_json namespace_json = dict() #if there is a resource type, scrape it and", "to files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file = open('resources.json', 'w')", "> 1: for resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1:", "# browser = webdriver.Opera(options = options, executable_path = './operadriver') browser = webdriver.Firefox(options =", "in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name = action_name # save the", "store resource type -> actions mapping for action in actions: fields = list(action.find_elements_by_tag_name('td'))", "action_name = action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name", "if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text # save the constraints resources_json[namespace] =", "scrape it and its ARN format if len(tables) > 1: for resource in", "action mapping from AWS docs import json import time from selenium import 
webdriver", "import Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL to", "and scrape their tables for row in rows: a_path = row.find_elements_by_tag_name('a')[0] url =", "JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json = {} resources_json = {} #", "'./operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') # open the general", "type and action mapping from AWS docs import json import time from selenium", "{} resources_json = {} # get list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\")", "in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text #", "in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type = str(fields[0].text.replace('*', ''))", "if len(fields) == 3: resource_type = str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name)", "and scrapes the tables def get_tables(url): # browser = webdriver.Chrome(options = options) #", "'' # store resource type -> actions mapping for action in actions: fields", "as EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import", "webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions", "= WebDriverWait(browser, 10) try: # wait for all the JSON elements to load", "get_tables(url): # browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options = options,", "action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) 
else: namespace_json[resource_type] = [action_name] previous_name = action_name", "AWS resource type and action mapping from AWS docs import json import time", "try: # wait until page has fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME,", "= dict() #if there is a resource type, scrape it and its ARN", "= WebDriverWait(browser, 10) try: # wait until page has fully loaded all the", "= open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file = open('resources.json', 'w') file.write(json.dumps(resources_json, indent=4)) file.close()", "action in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type = str(fields[0].text.replace('*',", "resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name = action_name # save", "= './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') # open the", "json import time from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui", "selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu') #", "and condition keys list and scrapes the tables def get_tables(url): # browser =", "else: namespace_json[resource_type] = [action_name] previous_name = action_name # save the constraints actions_json[namespace] =", "the tables def get_tables(url): # browser = webdriver.Chrome(options = options) # browser =", "= options, executable_path = './geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try: #", "if len(tables) > 0: previous_name = '' # store resource type -> actions", "the constraints resources_json[namespace] = namespace_json finally: browser.close() # browser = 
webdriver.Chrome(options = options)", "# dump constraints to files file = open('actions.json', 'w') file.write(json.dumps(actions_json, indent=4)) file.close() file", "'./geckodriver') browser.get(url) time.sleep(5) wait = WebDriverWait(browser, 10) try: # wait for all the", "constraints resources_json[namespace] = namespace_json finally: browser.close() # browser = webdriver.Chrome(options = options) #", "= action_name # save the constraints actions_json[namespace] = namespace_json namespace_json = dict() #if", "Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL to a IAM service's actions,", "load before scraping. wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: # get IAM service", "options, executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') browser.get(url)", "the list of resource types if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr')", "resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text", "except: pass actions_json = {} resources_json = {} # get list of all", "resource type -> actions mapping for action in actions: fields = list(action.find_elements_by_tag_name('td')) if", "namespace_json[resource_type] = [action_name] previous_name = action_name # save the constraints actions_json[namespace] = namespace_json", "= namespace_json finally: browser.close() # browser = webdriver.Chrome(options = options) # browser =", "try: # wait for all the JSON elements to load before scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME,", "= a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to files file = open('actions.json',", "page listing the actions, resource types, and condition keys for all IAM services.", "for all IAM services. aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try:", "namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif len(fields) > 3: resource_type = str(fields[3].text.replace('*', ''))", "TimeoutError: pass else: # get IAM service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text", "import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options import Options from", "Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL to a", "JSON elements to load before scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: #", "'') action_name = action_name.lower() if resource_type in namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name]", "fields[1].text # save the constraints resources_json[namespace] = namespace_json finally: browser.close() # browser =", "URL to a IAM service's actions, resources, and condition keys list and scrapes", "= 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: # wait until page has", "tables for row in rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url))", "else: namespace_json[resource_type] = [previous_name] elif len(fields) > 3: resource_type = str(fields[3].text.replace('*', '')) action_name", "actions_json[namespace] = namespace_json namespace_json = dict() #if there is a resource type, scrape", "finally: browser.close() # browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options =", "condition keys for all IAM services. 
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser,", "mapping from AWS docs import json import time from selenium import webdriver from", "aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: # wait until page", "all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape their tables", "pass actions_json = {} resources_json = {} # get list of all services", "= str(fields[0].text.replace('*', '')) if resource_type in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif", "= options) # browser = webdriver.Opera(options = options, executable_path = './operadriver') browser =", "IAM service name and tables namespace = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables)", "resource type and action mapping from AWS docs import json import time from", "selenium.webdriver.firefox.options import Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a URL", "resources_json[namespace] = namespace_json finally: browser.close() # browser = webdriver.Chrome(options = options) # browser", "browser = webdriver.Opera(options = options, executable_path = './operadriver') browser = webdriver.Firefox(options = options,", "elements to load before scraping. 
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view'))) except TimeoutError: pass else: # get", "list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type = str(fields[0].text.replace('*', '')) if resource_type in namespace_json:", "docs import json import time from selenium import webdriver from selenium.webdriver.common.by import By", "from AWS docs import json import time from selenium import webdriver from selenium.webdriver.common.by", "keys list and scrapes the tables def get_tables(url): # browser = webdriver.Chrome(options =", "'tr') namespace_json = dict() if len(tables) > 0: previous_name = '' # store", "through services and scrape their tables for row in rows: a_path = row.find_elements_by_tag_name('a')[0]", "resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]', '') action_name = action_name.lower()", "rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape their tables for row", "# save the constraints resources_json[namespace] = namespace_json finally: browser.close() # browser = webdriver.Chrome(options", "has fully loaded all the JSON elements wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights'))) except: pass actions_json =", "actions mapping for action in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3:", "from selenium.webdriver.firefox.options import Options options = Options() options.add_argument('--headless') options.add_argument('--disable-gpu') # takes in a", "resource type, scrape it and its ARN format if len(tables) > 1: for", "format if len(tables) > 1: for resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if", "constraints actions_json[namespace] = namespace_json namespace_json = dict() #if there is a resource type,", "import json import time from selenium import webdriver from 
selenium.webdriver.common.by import By from", "= browser.find_elements_by_xpath(\"//div[@id='main-col-body']/p/code\")[0].text tables = browser.find_elements_by_tag_name('table') if len(tables) > 0: # first table is", "browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: # wait until page has fully loaded", "there is a resource type, scrape it and its ARN format if len(tables)", "= '' # store resource type -> actions mapping for action in actions:", "resources_json = {} # get list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") #", "all IAM services. aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: #", "to a IAM service's actions, resources, and condition keys list and scrapes the", "1: for resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text]", "selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC", "options.add_argument('--disable-gpu') # takes in a URL to a IAM service's actions, resources, and", "from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import Options from selenium.webdriver.firefox.options import Options options", "# scrapes AWS resource type and action mapping from AWS docs import json", "{} # get list of all services rows = browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through", "'')) action_name = fields[0].text.replace(' [permission only]', '') action_name = action_name.lower() if resource_type in", "len(tables) > 1: for resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) >", "= 
browser.find_elements_by_xpath(\"//div[@id='main-col-body']/div[@class='highlights']/ul/li\") # iterate through services and scrape their tables for row in", "len(tables) > 0: # first table is the list of actions actions =", "resource types, and condition keys for all IAM services. aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference)", "len(fields) > 3: resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace(' [permission only]', '')", "is the list of actions actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table is", "tables def get_tables(url): # browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options", "resource types if len(tables) > 1: resources = tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict()", "= list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type = str(fields[0].text.replace('*', '')) if resource_type in", "'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html' browser.get(aws_reference) wait = WebDriverWait(browser, 10) try: # wait until page has fully", "rows: a_path = row.find_elements_by_tag_name('a')[0] url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints", "namespace_json finally: browser.close() # browser = webdriver.Chrome(options = options) # browser = webdriver.Opera(options", "10) try: # wait until page has fully loaded all the JSON elements", "in namespace_json: namespace_json[resource_type].append(previous_name) else: namespace_json[resource_type] = [previous_name] elif len(fields) > 3: resource_type =", "namespace_json: namespace_json[resource_type].append(action_name) else: namespace_json[resource_type] = [action_name] previous_name = action_name # save the constraints", "general page listing the actions, resource 
types, and condition keys for all IAM", "options, executable_path = './operadriver') browser = webdriver.Firefox(options = options, executable_path = './geckodriver') #", "fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] = fields[1].text # save the", "len(tables) > 0: previous_name = '' # store resource type -> actions mapping", "previous_name = action_name # save the constraints actions_json[namespace] = namespace_json namespace_json = dict()", "if len(tables) > 1: for resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields)", "previous_name = '' # store resource type -> actions mapping for action in", "url = a_path.get_attribute('href') get_tables(url) print('{}...done'.format(url)) browser.quit() # dump constraints to files file =", "actions = tables[0].find_elements(By.TAG_NAME, 'tr') # second table is the list of resource types", "from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.chrome.options import Options from selenium.webdriver.opera.options import", "= namespace_json namespace_json = dict() #if there is a resource type, scrape it", "= tables[1].find_elements(By.TAG_NAME, 'tr') namespace_json = dict() if len(tables) > 0: previous_name = ''", "service's actions, resources, and condition keys list and scrapes the tables def get_tables(url):", "the constraints actions_json[namespace] = namespace_json namespace_json = dict() #if there is a resource", "for resource in resources: fields = list(resource.find_elements_by_tag_name('td')) if len(fields) > 1: namespace_json[fields[0].text] =", "for action in actions: fields = list(action.find_elements_by_tag_name('td')) if len(fields) == 3: resource_type =", "= [previous_name] elif len(fields) > 3: resource_type = str(fields[3].text.replace('*', '')) action_name = fields[0].text.replace('" ]
[ "1): ##print(\"Match found\") counter = counter + 1 ##print(counter) no_of_pairs = no_of_pairs +", "##Declare and initialize variables for i in range(0, len(br), 1): for j in", "variables for i in range(0, len(br), 1): for j in range(0, len(ar), 1):", "array ar.sort() br = ar br = list(set(br)) ##print(br) no_of_pairs = 0 counter", "Libraries import sys def sockMerchant(n, ar): ##Sort the array ar.sort() br = ar", "1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter = counter +", "sockMerchant(n, ar): ##Sort the array ar.sort() br = ar br = list(set(br)) ##print(br)", "counter = 0 return(no_of_pairs) n = int(input().strip()) ar = list(map(int, input().strip().split(' '))) result", "br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter = counter + 1 ##print(counter)", "##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n =", "j in range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\")", "0 return(no_of_pairs) n = int(input().strip()) ar = list(map(int, input().strip().split(' '))) result = sockMerchant(n,", "import sys def sockMerchant(n, ar): ##Sort the array ar.sort() br = ar br", "sys def sockMerchant(n, ar): ##Sort the array ar.sort() br = ar br =", "##print(br) no_of_pairs = 0 counter = 0 ##Declare and initialize variables for i", "= list(set(br)) ##print(br) no_of_pairs = 0 counter = 0 ##Declare and initialize variables", "range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter =", "0 counter = 0 ##Declare and initialize variables for i in range(0, len(br),", "= counter + 1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter =", "#Import Libraries import sys def sockMerchant(n, ar): ##Sort the array ar.sort() br =", "counter + 1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0", 
"initialize variables for i in range(0, len(br), 1): for j in range(0, len(ar),", "for j in range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match", "##print(no_of_pairs) counter = 0 return(no_of_pairs) n = int(input().strip()) ar = list(map(int, input().strip().split(' ')))", "##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter = counter + 1", "no_of_pairs = 0 counter = 0 ##Declare and initialize variables for i in", "len(br), 1): for j in range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] ==", "+ 1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs)", "in range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter", "counter = counter + 1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter", "ar br = list(set(br)) ##print(br) no_of_pairs = 0 counter = 0 ##Declare and", "no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n = int(input().strip())", "##Sort the array ar.sort() br = ar br = list(set(br)) ##print(br) no_of_pairs =", "no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n = int(input().strip()) ar =", "n = int(input().strip()) ar = list(map(int, input().strip().split(' '))) result = sockMerchant(n, ar) print(result)", "and initialize variables for i in range(0, len(br), 1): for j in range(0,", "counter = 0 ##Declare and initialize variables for i in range(0, len(br), 1):", "= ar br = list(set(br)) ##print(br) no_of_pairs = 0 counter = 0 ##Declare", "list(set(br)) ##print(br) no_of_pairs = 0 counter = 0 ##Declare and initialize variables for", "1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n", "#!/bin/python3 #Import Libraries import sys def sockMerchant(n, ar): ##Sort the array ar.sort() br", 
"int(ar[j]/br[i] == 1): ##print(\"Match found\") counter = counter + 1 ##print(counter) no_of_pairs =", "##print(\"Match found\") counter = counter + 1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2)", "= 0 ##Declare and initialize variables for i in range(0, len(br), 1): for", "= 0 return(no_of_pairs) n = int(input().strip()) ar = list(map(int, input().strip().split(' '))) result =", "len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter = counter", "found\") counter = counter + 1 ##print(counter) no_of_pairs = no_of_pairs + int(counter/2) ##print(no_of_pairs)", "the array ar.sort() br = ar br = list(set(br)) ##print(br) no_of_pairs = 0", "def sockMerchant(n, ar): ##Sort the array ar.sort() br = ar br = list(set(br))", "return(no_of_pairs) n = int(input().strip()) ar = list(map(int, input().strip().split(' '))) result = sockMerchant(n, ar)", "if int(ar[j]/br[i] == 1): ##print(\"Match found\") counter = counter + 1 ##print(counter) no_of_pairs", "= 0 counter = 0 ##Declare and initialize variables for i in range(0,", "br = list(set(br)) ##print(br) no_of_pairs = 0 counter = 0 ##Declare and initialize", "in range(0, len(br), 1): for j in range(0, len(ar), 1): ##print(ar[j], br[i]) if", "== 1): ##print(\"Match found\") counter = counter + 1 ##print(counter) no_of_pairs = no_of_pairs", "ar.sort() br = ar br = list(set(br)) ##print(br) no_of_pairs = 0 counter =", "int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n = int(input().strip()) ar = list(map(int, input().strip().split('", "+ int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n = int(input().strip()) ar = list(map(int,", "ar): ##Sort the array ar.sort() br = ar br = list(set(br)) ##print(br) no_of_pairs", "= no_of_pairs + int(counter/2) ##print(no_of_pairs) counter = 0 return(no_of_pairs) n = int(input().strip()) ar", "1): for j in range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i] == 1):", "0 ##Declare and 
initialize variables for i in range(0, len(br), 1): for j", "for i in range(0, len(br), 1): for j in range(0, len(ar), 1): ##print(ar[j],", "br = ar br = list(set(br)) ##print(br) no_of_pairs = 0 counter = 0", "range(0, len(br), 1): for j in range(0, len(ar), 1): ##print(ar[j], br[i]) if int(ar[j]/br[i]", "i in range(0, len(br), 1): for j in range(0, len(ar), 1): ##print(ar[j], br[i])" ]
[ "https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed", "\"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \"", "None: \"\"\"Perform a general parse of the test files.\"\"\" for data_file in datafiles.listdir():", "'python_version >= \"3.5\" and python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and", "(\"==\", \"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ),", "import py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry,", ") ], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "), ], ), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"],", "\"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\",", "Authors Licensed under the Apache License, Version 2.0 (the \"License\"); you may not", "exception raised for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles:", "\"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"", 
"--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ],", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\",", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry(", "\"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the", "Path import py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry, parse_requirements_file,", ">= \"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \"", "\"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"),", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\",", "-> None: \"\"\"Perform a general parse of the test files.\"\"\" for data_file in", "\" and (python_version<'2.7' and platform_version=='2')\" \" 
--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"],", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry(", "permissions and limitations under the License. \"\"\" import os from pathlib import Path", "versions=[], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\",", "( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ),", "entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes", "the License for the specific language governing permissions and limitations under the License.", "], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \"", "python_full_version >= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=(", "( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], 
environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry(", "\"python_version\", \"3.5\"), ), ), hashes=None, ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\"", "License for the specific language governing permissions and limitations under the License. \"\"\"", "\"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ],", "extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "Unless required by applicable law or agreed to in writing, software distributed under", "a general parse of the test files.\"\"\" for data_file in datafiles.listdir(): assert len(parse_requirements_file(Path(data_file)))", "extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), (", "/ j) for j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files)", "\"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,", "@ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general parse of the test", "valiant.util.requirements. 
Copyright 2021 The Valiant Authors Licensed under the Apache License, Version 2.0", "= [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES", "), ], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\"", "strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")],", "), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None,", "\"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[],", "\" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\"", "the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "_req_files = [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"]", ">= \"3.5\" and python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version", "License, Version 2.0 (the \"License\"); you may not use this file except in", "\"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "assert req.versions == entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert", "), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ),", "(python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", 
\"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\",", "\"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", (", "entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None:", "==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \" packaging==", "for the specific language governing permissions and limitations under the License. \"\"\" import", "strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux,", "== entry[1].package assert req.versions == entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers ==", "\"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ), ), ( \"packaging==20.7; \" 'python_version", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \"", "software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "by applicable law or agreed to in writing, software distributed under the License", "hashes=None, ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"'", "( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], 
environment_markers=None, hashes=[", "), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \"", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License", "import pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__))", "hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None,", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \" packaging== 20.7", "copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", ">= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\",", "20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\"", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"\"\" import os from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import pytest", "the License. \"\"\" import os from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html", "parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general parse of", "in compliance with the License. 
You may obtain a copy of the License", "\"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\",", "Valiant Authors Licensed under the Apache License, Version 2.0 (the \"License\"); you may", "KIND, either express or implied. See the License for the specific language governing", "extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\",", "\"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), (", "in writing, software distributed under the License is distributed on an \"AS IS\"", "and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\",", "writing, software distributed under the License is distributed on an \"AS IS\" BASIS,", "with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general", "( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", 
\"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\",", "or agreed to in writing, software distributed under the License is distributed on", "( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\"", "or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry(", "), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ), ),", "from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import", "entry[1].package assert req.versions == entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers", "\"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ]", "requirement scenarios.\"\"\" for entry in tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package", "the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) /", "= [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ), ), (", "--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", 
versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), (", "package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ (", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\"", "and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "raised for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path)", "python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\",", "environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\",", "), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def test_parse_requirements_entry()", "OR CONDITIONS OF ANY KIND, either express or implied. See the License for", "versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\"", "OF ANY KIND, either express or implied. 
See the License for the specific", "\"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[ (", "j) for j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests", "(\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ), ), ( \"packaging==20.7;", "\"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry(", "], ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"'", "\" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\",", "under the License. \"\"\" import os from pathlib import Path import py #", "req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception", "], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "may not use this file except in compliance with the License. 
You may", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\",", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "\"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and", "versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general parse of the test files.\"\"\" for", "\"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry(", "[os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES =", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions == entry[1].versions assert req.extras == entry[1].extras", "), ), ] def test_parse_requirements_entry() -> None: \"\"\"Work through different requirement scenarios.\"\"\" for", "\"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def test_parse_requirements_entry() -> None: \"\"\"Work through different", "python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=(", "environment_markers=None, 
hashes=None, ), ), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging", "packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), (", "), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ),", "' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "\"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), (", "( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ),", "), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ),", "\" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\",", "\"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( 
\"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ),", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \" packaging== 20.7 \", RequirementEntry(", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\",", "\"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ), ), (", "\"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def test_parse_requirements_entry() -> None: \"\"\"Work through", "valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir)", "See the License for the specific language governing permissions and limitations under the", "= pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None,", "governing permissions and limitations under the License. 
\"\"\" import os from pathlib import", "entry in tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions ==", "environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ),", "(\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\",", "py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if", "/ \"req_data\" _req_files = [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if j[-3:]", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\",", "\" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ),", "\" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\",", "this file except in compliance with the License. 
You may obtain a copy", "hashes=None, ), ), ( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\",", "( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and", "\"3.5\"), ), ), hashes=None, ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and", "\"License\"); you may not use this file except in compliance with the License.", "None: \"\"\"Check exception raised for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES", "\"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==20.7\",", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "you may not use this file except in compliance with the License. 
You", "), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ),", "agreed to in writing, software distributed under the License is distributed on an", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \" packaging== 20.7 \",", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "\"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7; \" 'python_version", "and limitations under the License. \"\"\" import os from pathlib import Path import", "extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\",", "with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0", "\"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"),", "), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ),", "implied. 
See the License for the specific language governing permissions and limitations under", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), (", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[", "\"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==99;", "platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\",", "general parse of the test files.\"\"\" for data_file in datafiles.listdir(): assert len(parse_requirements_file(Path(data_file))) >", "package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\",", "), ], ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version <", "dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None:", "), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, 
strange]==20.7\", RequirementEntry( package=\"packaging\",", "at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software", "), ), hashes=None, ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version", "\"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\"", "extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "use this file except in compliance with the License. You may obtain a", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use", "RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "\"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7; \"", "Copyright 2021 The Valiant Authors Licensed under the Apache License, Version 2.0 (the", "path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a", "assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for dodgy", "hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \"", "REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR / j) for j in", "), ), ( 
\"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ),", "\" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\",", "in tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions == entry[1].versions", "py.path) -> None: \"\"\"Perform a general parse of the test files.\"\"\" for data_file", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), (", "\"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\",", "required by applicable law or agreed to in writing, software distributed under the", "), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ),", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\",", "\"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and", "(\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ),", "# https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir", "def test_parse_requirements_entry() -> None: \"\"\"Work through different requirement scenarios.\"\"\" for entry in tests:", 
"\"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "os from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util", "parse of the test files.\"\"\" for data_file in datafiles.listdir(): assert len(parse_requirements_file(Path(data_file))) > 0", "environment_markers=None, hashes=None, ), ), ( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\",", "https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir =", "--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\",", "\"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ), ), ( \"packaging==20.7; \"", "extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "\"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\", 
\"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\",", "( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\",", "), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"' ' or", ">= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\",", "), ), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None,", "\" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ),", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ),", "\"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "not use this file except in compliance with the License. 
You may obtain", "), ), ( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"' '", "( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"' ' or python_full_version", "obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "], ), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None,", "environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ (", "py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, )", "( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"],", "<gh_stars>1-10 \"\"\"Test for valiant.util.requirements. Copyright 2021 The Valiant Authors Licensed under the Apache", "limitations under the License. 
\"\"\" import os from pathlib import Path import py", "), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ),", "extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ],", "\"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\",", "for j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests =", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry(", "a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "the specific language governing permissions and limitations under the License. \"\"\" import os", "< \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\",", "ANY KIND, either express or implied. See the License for the specific language", "environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "file except in compliance with the License. 
You may obtain a copy of", "tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ), ),", "\"\"\"Check exception raised for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def", "== \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[],", "python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry(", "( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\",", "\"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"' \"", "_dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR / j)", "2.0 (the \"License\"); you may not use this file except in compliance with", "environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"),", "\"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def", "pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import (", "( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\"", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "( 
\"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==", "( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7;", "pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ),", "(the \"License\"); you may not use this file except in compliance with the", "environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\"", "\"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version < \"3.0.0\"' ' or python_full_version >=", "pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general parse", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging == 20.7\",", "j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\",", "os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\",", "( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def test_parse_requirements_entry() -> None: \"\"\"Work", "( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\",", "(\">=\", \"python_version\", \"3.5\"), ), ), hashes=None, ), ), ( \"packaging==20.7; \" 
'python_version >=", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ),", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\",", "scenarios.\"\"\" for entry in tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert", "entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for", "\"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\",", "\"3.5\"), ), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ],", "\"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ),", "\"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ),", "for valiant.util.requirements. 
Copyright 2021 The Valiant Authors Licensed under the Apache License, Version", "environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\",", "-> None: \"\"\"Work through different requirement scenarios.\"\"\" for entry in tests: req =", "req.versions == entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes", "parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general parse of the", "hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "for entry in tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions", "You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by", "may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ),", "assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def", "law or agreed to in writing, software distributed under the License is distributed", 
"--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ),", "' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ),", "def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform a general parse of the test files.\"\"\"", "Version 2.0 (the \"License\"); you may not use this file except in compliance", "], ), ), ] def test_parse_requirements_entry() -> None: \"\"\"Work through different requirement scenarios.\"\"\"", "req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception()", "the Apache License, Version 2.0 (the \"License\"); you may not use this file", "\"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\",", "(\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ),", "), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging==20.7; \" 'python_version >=", "-> None: \"\"\"Check exception raised for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\"))", "20.7\", RequirementEntry( 
package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==", "( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "\" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"),", "language governing permissions and limitations under the License. \"\"\" import os from pathlib", "under the Apache License, Version 2.0 (the \"License\"); you may not use this", "\"req_data\" _req_files = [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if j[-3:] ==", "\"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), (", "either express or implied. See the License for the specific language governing permissions", "\"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \"", "\"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ),", "versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "Apache License, Version 2.0 (the \"License\"); you may not use this file except", "or implied. 
See the License for the specific language governing permissions and limitations", "), ] def test_parse_requirements_entry() -> None: \"\"\"Work through different requirement scenarios.\"\"\" for entry", "\"\"\"Work through different requirement scenarios.\"\"\" for entry in tests: req = parse_requirements_entry(entry[0]) assert", "environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None,", "pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR", "strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[", "or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[],", "), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), (", "and python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"',", "req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for dodgy file", "from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR =", "and (python_version<'2.7' and platform_version=='2')\" \" 
--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=(", "\"\"\"Test for valiant.util.requirements. Copyright 2021 The Valiant Authors Licensed under the Apache License,", "] def test_parse_requirements_entry() -> None: \"\"\"Work through different requirement scenarios.\"\"\" for entry in", "= py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR)", "CONDITIONS OF ANY KIND, either express or implied. See the License for the", "--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), (", "--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"),", "extras=[\"quux\", \"strange\"], environment_markers=None, hashes=None, ), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "(\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ),", "to in writing, software distributed under the License is distributed on an \"AS", "\"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7'", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging == 20.7\", RequirementEntry(", "import Path import py # 
https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry,", "except in compliance with the License. You may obtain a copy of the", "\"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ),", "entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for dodgy file path.\"\"\" with", "\" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ (", "\"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\",", "== entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes ==", "assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check", "None: \"\"\"Work through different requirement scenarios.\"\"\" for entry in tests: req = parse_requirements_entry(entry[0])", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "extras=[], environment_markers=None, hashes=None, ), ), ( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "versions=[(\"==\", \"20.7\")], extras=[], 
environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"),", "== entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for dodgy file path.\"\"\"", "\"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", (", "), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[ (", "\"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[", "License. \"\"\" import os from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import", "tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions == entry[1].versions assert", "--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\",", "test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\"))", "environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None,", "[ ( \"packaging\", RequirementEntry( package=\"packaging\", versions=[], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging==20.7\",", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def test_parse_requirements_entry() ->", "(\">=\", 
\"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\",", "extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ),", "), ( \"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry(", "import os from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import pytest from", "2021 The Valiant Authors Licensed under the Apache License, Version 2.0 (the \"License\");", "20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\",", "( \"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), (", "python_full_version >= \"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\",", "== entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised", "\"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry(", "= os.path.dirname(os.path.realpath(__file__)) REQ_DIR = 
py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR / j) for", ") ], ), ), ] def test_parse_requirements_entry() -> None: \"\"\"Work through different requirement", "and python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"'", "The Valiant Authors Licensed under the Apache License, Version 2.0 (the \"License\"); you", "), ( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None,", "\"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\"", "), ( \"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "compliance with the License. You may obtain a copy of the License at", "os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR / j) for j", "\"\"\"Perform a general parse of the test files.\"\"\" for data_file in datafiles.listdir(): assert", "specific language governing permissions and limitations under the License. \"\"\" import os from", "j in os.listdir(REQ_DIR) if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [", "express or implied. 
See the License for the specific language governing permissions and", "\"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ),", "req.package == entry[1].package assert req.versions == entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers", "\"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux, strange]==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\",", "\"packaging==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ (", "\"3.5\" and python_full_version < \"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >=", "License. 
You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required", "test_parse_requirements_entry() -> None: \"\"\"Work through different requirement scenarios.\"\"\" for entry in tests: req", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ),", ") _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR /", "= parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions == entry[1].versions assert req.extras ==", "\" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", )", "assert req.package == entry[1].package assert req.versions == entry[1].versions assert req.extras == entry[1].extras assert", "through different requirement scenarios.\"\"\" for entry in tests: req = parse_requirements_entry(entry[0]) assert req.package", "for dodgy file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) ->", "applicable law or agreed to in writing, software distributed under the License is", "\"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\", \"2.7\"),", "the License. 
You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless", "\"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "hashes=None, ), ), ( \"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ),", "\"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=None,", "\"packaging== 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging", "def test_parse_requirements_file_exception() -> None: \"\"\"Check exception raised for dodgy file path.\"\"\" with pytest.raises(ValueError):", "file path.\"\"\" with pytest.raises(ValueError): parse_requirements_file(Path(\"\")) parse_requirements_file(Path(\"/i_dont_exist\")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: \"\"\"Perform", "== entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() ->", "\"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \" packaging== 20.7 \", RequirementEntry( package=\"packaging\",", "if j[-3:] == \"txt\"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( \"packaging\", RequirementEntry(", "\"packaging[quux, strange]==99; os_name=='os2'\" \" and (python_version<'2.7' and platform_version=='2')\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\",", "hashes=None, ), ), ( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None,", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied. See", "hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ) ], ), ), ] def test_parse_requirements_entry() -> None:", "package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\",", "parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files =", "\"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\", \"strange\"], environment_markers=None,", "(\"<\", \"python_version\", \"2.7\"), (\"==\", \"platform_version\", \"2\"), ), ), hashes=[ ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", )", "parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files = [os.path.join(REQ_DIR", "hashes=None, ), ), ( \"packaging ==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None,", "RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\", ( \"and\", (\">=\", \"python_version\", \"3.5\"), (\"<\",", "RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / \"req_data\" _req_files", "hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), (", "< \"3.0.0\"' ' or 
python_full_version >= \"3.4.0\" and python_version >= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\"", "( \"packaging == 20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ),", "), ), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")],", "different requirement scenarios.\"\"\" for entry in tests: req = parse_requirements_entry(entry[0]) assert req.package ==", ">= \"3.5\"' \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=( \"or\",", "\"packaging==20.7\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=None, ), ), ( \"packaging== 20.7\",", "\"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ],", "extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ) ], ), ), ( \"packaging==20.7\" \"", "), hashes=None, ), ), ( \"packaging==20.7; \" 'python_version >= \"3.5\" and python_full_version <", "versions=[(\"==\", \"99\")], extras=[\"quux\", \"strange\"], environment_markers=( \"and\", (\"==\", \"os_name\", \"os2\"), ( \"and\", (\"<\", \"python_version\",", "\"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[", 
"\"3.0.0\"' ' or python_full_version >= \"3.4.0\" and python_version >= \"3.5\"', RequirementEntry( package=\"packaging\", versions=[(\"==\",", "--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\",", "req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions == entry[1].versions assert req.extras", "( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], ), ), ( \"packaging[quux,", "\"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"), ), ), hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), (", "), ( \"packaging[quux, strange]==20.7\" \" --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\" \" --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", RequirementEntry( package=\"packaging\", versions=[(\"==\", \"20.7\")], extras=[\"quux\",", "\"3.5\"), (\"<\", \"python_full_version\", \"3.0.0\"), ), ( \"and\", (\">=\", \"python_full_version\", \"3.4.0\"), (\">=\", \"python_version\", \"3.5\"),", "\"strange\"], environment_markers=None, hashes=[ ( \"sha256\", \"a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128\", ), ( \"sha256\", \"7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41\", ), ], )," ]
[ "), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 ) ) # print(sphero.plot_time_list) #", "fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data, filename='error behaviour', auto_open=True) py.plot(fig,", "bordercolor='#FFFFFF', # borderwidth=2 ) ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour,", "monospace', size=28, color='#000' ) ), # showticklabels=False ), legend=dict( # x=0, # y=0,", "), # showticklabels=False ), legend=dict( # x=0, # y=0, x= 1.1, y= 1.2,", "plotly # import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if", "if step_count == 40000: # if step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list,", "x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' )", "name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour", "go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction'", "New, monospace', size=28, color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)',", "import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count ==", ") ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace',", "borderwidth=2 ) ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour, 
y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour]", "# plotly # import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count):", "# showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace', size=28, color='#000'", "go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x", "import plotly.graph_objs as go # plotly # import plotly # plotly.tools.set_credentials_file( # username='houterm',", "direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter(", "traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2", "plotly.graph_objs as go # plotly # import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey')", "go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time", "orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 ) ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list)", "go # plotly # import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero,", "(pixels)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ), legend=dict(", "y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", #", "Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New, 
monospace', size=28,", "x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New, monospace', size=28, color='#000' )", "api_key='putYaOwnKey') def plot(sphero, step_count): if step_count == 40000: # if step_count == 1000:", "if step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour", "plotly.plotly as py import plotly.graph_objs as go # plotly # import plotly #", "go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout = go.Layout( title=go.layout.Title( # text='Error", "direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout =", "xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), #", "x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction'", "), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace', size=28,", "as py import plotly.graph_objs as go # plotly # import plotly # plotly.tools.set_credentials_file(", "x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list,", "1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', #", "size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 ) ) #", "= [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, 
y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') #", "name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' )", "monospace', size=28, color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict(", "x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list,", "x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y", "xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New, monospace', size=28, color='#000'", "color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New,", "text='Error (pixels)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ),", "= go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y", "direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour =", "[x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data,", "y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', 
# bordercolor='#FFFFFF',", ") y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list,", "def plot(sphero, step_count): if step_count == 40000: # if step_count == 1000: x_error_behaviour", "= go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered", "py import plotly.graph_objs as go # plotly # import plotly # plotly.tools.set_credentials_file( #", "# text='Error Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict(", "color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 ) ) # print(sphero.plot_time_list)", "y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout = go.Layout( title=go.layout.Title(", "New, monospace', size=28, color='#000' ) ), # showticklabels=False ), legend=dict( # x=0, #", "font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ), legend=dict( #", "# if step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' )", "print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout)", "y direction' ) layout = go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper',", "x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout", "), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( 
family='Courier New, monospace', size=28, color='#000' ) ),", "# print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data,", "print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig,", "y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour", "step_count): if step_count == 40000: # if step_count == 1000: x_error_behaviour = go.Scatter(", "plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count == 40000: # if", "layout = go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis(", "= go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title(", "text='Time (s)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ),", "plot(sphero, step_count): if step_count == 40000: # if step_count == 1000: x_error_behaviour =", "y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout = go.Layout( title=go.layout.Title( # text='Error Behaviour 3:", "y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' )", "# borderwidth=2 ) ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour,", "step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) 
y_error_behaviour =", "y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data, filename='error", "(s)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis(", "title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False", "go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered", "family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ), legend=dict( # x=0,", "1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list,", "x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data, filename='error behaviour',", "x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout = go.Layout( title=go.layout.Title( # text='Error Behaviour", "font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 )", "== 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction' ) y_error_behaviour = go.Scatter(", "family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 ) )", "yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), #", "# bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # 
borderwidth=2 ) ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data", "3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New, monospace',", "go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data, filename='error behaviour', auto_open=True) py.plot(fig, filename='unfiltered_error', auto_open=True)", "size=28, color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier", ") x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter(", "data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names')", "plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count == 40000:", "y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction'", "y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data, filename='error behaviour', auto_open=True)", "= go.Figure(data=data, layout=layout) # py.iplot(fig, filename='styling-names') # py.plot(data, filename='error behaviour', auto_open=True) py.plot(fig, filename='unfiltered_error',", "# showticklabels=False ), legend=dict( # x=0, # y=0, x= 1.1, y= 1.2, traceorder='normal',", "<filename>plot_error.py import plotly.plotly as py import plotly.graph_objs as go # plotly # import", "bgcolor='#E2E2E2', # bordercolor='#FFFFFF', # borderwidth=2 ) ) # print(sphero.plot_time_list) # 
print(sphero.plot_x_error_list) data =", "name='x direction' ) y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour =", "= go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout = go.Layout( title=go.layout.Title( #", "# print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig = go.Figure(data=data, layout=layout) #", "import plotly.plotly as py import plotly.graph_objs as go # plotly # import plotly", "font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title(", "= go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list, name='unfiltered x direction' ) y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list,", "40000: # if step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x direction'", "# plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count == 40000: #", "title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False", ") layout = go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper', x=0 ),", "color='#000' ) ), # showticklabels=False ), legend=dict( # x=0, # y=0, x= 1.1,", "name='unfiltered y direction' ) layout = go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze',", "), legend=dict( # x=0, # y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif',", "1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2', # bordercolor='#FFFFFF', #", 
"showticklabels=False ), legend=dict( # x=0, # y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict(", "# y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\",", "# import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count", "as go # plotly # import plotly # plotly.tools.set_credentials_file( # username='houterm', api_key='putYaOwnKey') def", "text='Error Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier", ") y_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_unfiltered_error_list, name='unfiltered y direction' ) layout = go.Layout(", "direction' ) layout = go.Layout( title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper', x=0", "Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)', font=dict( family='Courier New,", "# x=0, # y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000'", "size=28, color='#000' ) ), # showticklabels=False ), legend=dict( # x=0, # y=0, x=", "title=go.layout.Title( # text='Error Behaviour 3: Maze', xref='paper', x=0 ), xaxis=go.layout.XAxis( title=go.layout.xaxis.Title( text='Time (s)',", "showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace', size=28, color='#000' )", "step_count == 40000: # if step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list,", "x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ), orientation=\"h\", # bgcolor='#E2E2E2',", "legend=dict( # x=0, # y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20,", ") # print(sphero.plot_time_list) # 
print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig =", "x=0, # y=0, x= 1.1, y= 1.2, traceorder='normal', font=dict( family='sans-serif', size=20, color='#000' ),", "), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error (pixels)', font=dict( family='Courier New, monospace', size=28, color='#000' ) ),", ") ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour, y_error_behaviour, x_unfiltered_error_behaviour, y_unfiltered_error_behaviour] fig", "username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count == 40000: # if step_count ==", "y_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_y_error_list, name='y direction' ) x_unfiltered_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_unfiltered_error_list,", "== 40000: # if step_count == 1000: x_error_behaviour = go.Scatter( x=sphero.plot_time_list, y=sphero.plot_x_error_list, name='x", "# username='houterm', api_key='putYaOwnKey') def plot(sphero, step_count): if step_count == 40000: # if step_count", "family='Courier New, monospace', size=28, color='#000' ) ), # showticklabels=False ), yaxis=go.layout.YAxis( title=go.layout.yaxis.Title( text='Error", "# bordercolor='#FFFFFF', # borderwidth=2 ) ) # print(sphero.plot_time_list) # print(sphero.plot_x_error_list) data = [x_error_behaviour,", ") ), # showticklabels=False ), legend=dict( # x=0, # y=0, x= 1.1, y=" ]
[ "string = \"abc\" step = mc.walk(string) assert len(step) == 1 assert step in", "= object message = r\"object of type 'type' has no len\\(\\)\" with pytest.raises(TypeError,", "mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42 message = r\"object of type 'int' has", "= \"abc\" step = mc.walk(string) assert len(step) == 1 assert step in string", "test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq = dict()", "r\"object of type 'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float():", "len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator'", "None def test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is None def test_walk_ok_string(): string", "mc def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq", "seq = dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq = tuple() assert", "complex(1, -1) message = r\"object of type 'complex' has no len\\(\\)\" with pytest.raises(TypeError,", "# type: ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty = '' assert", "message = r\"object of type 'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad)", "[] assert mc.walk(seq) is None def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is", "= [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same = iter([1,", "type 'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad =", "no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42 message =", "pytest.raises(TypeError, match=message): mc.walk(bad) def 
test_walk_nok_wrong_type_int(): bad = 42 message = r\"object of type", "= {\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad", "message = r\"object of type 'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad)", "assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is None", "def test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"} assert mc.walk(seq) in seq.values() def", "'' assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is", "match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object():", "assert mc.walk(seq) is None def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is None", "disable=missing-docstring,unused-import,reimported import pytest # type: ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty", "test_walk_ok_string_list(): the_same = \"a\" seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same", "= {0: \"a\", 1: \"b\"} assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq =", "for n in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method' has", "as mc def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is None def test_walk_ok_empty_list():", "message = r\"object of type 'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n", "test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq = tuple()", "= mc.walk(a_range) assert step in a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same =", "len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function(): message =", 
"match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object of type 'float'", "= 3.1415 message = r\"object of type 'float' has no len\\(\\)\" with pytest.raises(TypeError,", "None def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq", "bad = 42 message = r\"object of type 'int' has no len\\(\\)\" with", "object message = r\"object of type 'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message):", "assert isinstance(step, int) def test_walk_ok_function_list(): the_same = print seq = [the_same, the_same, the_same]", "no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message", "pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of", "type 'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message =", "def test_walk_ok_string(): string = \"abc\" step = mc.walk(string) assert len(step) == 1 assert", "= [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq = {0:", "test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"} assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict():", "type 'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad =", "in seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError,", "mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator' has no len\\(\\)\" with", "== 1 assert step in string def test_walk_ok_string_list(): the_same = \"a\" seq =", "of type 'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def 
test_walk_nok_wrong_type_int(): bad", "= range(42) step = mc.walk(a_range) assert step in a_range assert isinstance(step, int) def", "assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"} assert", "is None def test_walk_nok_wrong_type_object(): bad = object message = r\"object of type 'type'", "bad = complex(1, -1) message = r\"object of type 'complex' has no len\\(\\)\"", "int) def test_walk_ok_function_list(): the_same = print seq = [the_same, the_same, the_same] assert mc.walk(seq)", "test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def", "step = mc.walk(string) assert len(step) == 1 assert step in string def test_walk_ok_string_list():", "mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad = object message = r\"object of type", "test_walk_ok_string(): string = \"abc\" step = mc.walk(string) assert len(step) == 1 assert step", "a_range = range(42) step = mc.walk(a_range) assert step in a_range assert isinstance(step, int)", "n in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method' has no", "of type 'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad", "[the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range = range(42) step", "def test_walk_ok_string_list(): the_same = \"a\" seq = [the_same, the_same, the_same] assert mc.walk(seq) ==", "test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is None def test_walk_ok_empty_set(): seq = {}", "= r\"object of type 'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def", "is None def test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is None def test_walk_ok_string():", "mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is None 
def", "is None def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is None def test_walk_ok_empty_dict():", "mc.walk(seq) is None def test_walk_ok_string(): string = \"abc\" step = mc.walk(string) assert len(step)", "bad = 3.1415 message = r\"object of type 'float' has no len\\(\\)\" with", "has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42 message", "assert mc.walk(seq) is None def test_walk_ok_string(): string = \"abc\" step = mc.walk(string) assert", "assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is None", "-1) message = r\"object of type 'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message):", "def test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is None def test_walk_ok_string(): string =", "type 'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad =", "= r\"object of type 'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for", "def test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method' has no len\\(\\)\" with pytest.raises(TypeError,", "test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq = []", "step = mc.walk(a_range) assert step in a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same", "= [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range = range(42)", "= '' assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq)", "r\"object of type 'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex():", "the_same] assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range)", 
"test_walk_ok_function_list(): the_same = print seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same", "seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError, match=message):", "def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq =", "[the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2,", "the_same = print seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def", "with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator' has", "42 message = r\"object of type 'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message):", "has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of", "assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range) assert", "import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is None", "def test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range) assert step in a_range assert", "def test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object of type 'float' has no", "is None def test_walk_ok_string(): string = \"abc\" step = mc.walk(string) assert len(step) ==", "'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object", "has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1)", "1 assert step in string def test_walk_ok_string_list(): the_same = \"a\" seq = [the_same,", "== the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) 
seq = [the_same, the_same,", "step in string def test_walk_ok_string_list(): the_same = \"a\" seq = [the_same, the_same, the_same]", "{} assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is", "string def test_walk_ok_string_list(): the_same = \"a\" seq = [the_same, the_same, the_same] assert mc.walk(seq)", "utf-8 -*- # pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore import mc_flow_sim.mc_flow_sim as", "is None def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple():", "{\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad =", "pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is None def", "= complex(1, -1) message = r\"object of type 'complex' has no len\\(\\)\" with", "test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method' has no len\\(\\)\" with pytest.raises(TypeError, match=message):", "-*- # pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore import mc_flow_sim.mc_flow_sim as mc", "with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object of", "= None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad = object message =", "None def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is None def test_walk_ok_empty_set(): seq", "def test_walk_ok_function_list(): the_same = print seq = [the_same, the_same, the_same] assert mc.walk(seq) ==", "= r\"object of type 'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def", "import pytest # type: ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty =", "range(42) step = mc.walk(a_range) assert step in a_range assert 
isinstance(step, int) def test_walk_ok_function_list():", "test_walk_nok_wrong_type_object(): bad = object message = r\"object of type 'type' has no len\\(\\)\"", "with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42 message = r\"object of", "with pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object", "print seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same", "def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq =", "is None def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is None def test_walk_ok_empty_set():", "mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"} assert mc.walk(seq)", "in string def test_walk_ok_string_list(): the_same = \"a\" seq = [the_same, the_same, the_same] assert", "match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42 message = r\"object of type 'int'", "def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) seq = [the_same, the_same, the_same] assert", "def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq)", "message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert", "bad = None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad = object message", "assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad = object message = r\"object of", "match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message = r\"object of type", "test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object of type 'float' has no len\\(\\)\"", "the_same = \"a\" seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def", 
"iter([1, 2, 3]) seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def", "'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42", "1: \"b\"} assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message", "test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) seq = [the_same, the_same, the_same] assert mc.walk(seq)", "'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234)) def", "mc.walk(empty) is None def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is None def", "test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range) assert step in a_range assert isinstance(step,", "tuple() assert mc.walk(seq) is None def test_walk_ok_string(): string = \"abc\" step = mc.walk(string)", "[the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq = {0: \"a\",", "\"a\" seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range", "seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same =", "mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad", "the_same = iter([1, 2, 3]) seq = [the_same, the_same, the_same] assert mc.walk(seq) ==", "3.1415 message = r\"object of type 'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message):", "test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message = r\"object of type 'complex' has no", "message = r\"object of type 'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad)", "the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq = {0: \"a\", 1:", "\"b\"} message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def 
test_walk_nok_wrong_type_none(): bad = None", "the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) seq", "= {} assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq)", "mc.walk(seq) == the_same def test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range) assert step", "the_same def test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range) assert step in a_range", "assert step in a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same = print seq", "'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1,", "= 42 message = r\"object of type 'int' has no len\\(\\)\" with pytest.raises(TypeError,", "with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message = r\"object", "pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator' has no", "match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator' has no len\\(\\)\"", "3]) seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq", "seq = {\"a\": \"b\"} message = r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none():", "has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function():", "mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method'", "range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method' has no len\\(\\)\" with", "2, 3]) seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def 
test_walk_ok_int_dict():", "assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is None", "match=message): mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of type", "with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is None", "def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message = r\"object of type 'complex' has", "no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message =", "= [] assert mc.walk(seq) is None def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq)", "test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad = object", "= r\"object of type 'float' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def", "== the_same def test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"} assert mc.walk(seq) in", "isinstance(step, int) def test_walk_ok_function_list(): the_same = print seq = [the_same, the_same, the_same] assert", "= \"a\" seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_range():", "= r\"object of type 'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def", "r\"object of type 'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression():", "in range(1234)) def test_walk_nok_wrong_type_function(): message = r\"object of type 'builtin_function_or_method' has no len\\(\\)\"", "mc.walk(string) assert len(step) == 1 assert step in string def test_walk_ok_string_list(): the_same =", "def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator' has no len\\(\\)\" with pytest.raises(TypeError,", "seq = {0: \"a\", 1: \"b\"} assert mc.walk(seq) in 
seq.values() def test_walk_nok_string_dict(): seq", "def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad =", "len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object", "len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message =", "assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message = r\"0\"", "test_walk_nok_wrong_type_generator_expression(): message = r\"object of type 'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message):", "None def test_walk_ok_string(): string = \"abc\" step = mc.walk(string) assert len(step) == 1", "len(step) == 1 assert step in string def test_walk_ok_string_list(): the_same = \"a\" seq", "seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq =", "message = r\"object of type 'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad)", "type 'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234))", "type: ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty)", "len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int(): bad = 42 message = r\"object", "seq = tuple() assert mc.walk(seq) is None def test_walk_ok_string(): string = \"abc\" step", "no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n in range(1234)) def test_walk_nok_wrong_type_function(): message", "has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message", "def test_walk_ok_empty_list(): seq = [] assert mc.walk(seq) is None def test_walk_ok_empty_set(): 
seq =", "mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message = r\"object of type 'complex'", "r\"object of type 'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n", "None def test_walk_nok_wrong_type_object(): bad = object message = r\"object of type 'type' has", "mc.walk(seq) is None def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is None def", "mc.walk(a_range) assert step in a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same = print", "= r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad)", "the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range = range(42) step =", "= print seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list():", "test_walk_nok_wrong_type_int(): bad = 42 message = r\"object of type 'int' has no len\\(\\)\"", "assert step in string def test_walk_ok_string_list(): the_same = \"a\" seq = [the_same, the_same,", "assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) seq =", "in a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same = print seq = [the_same,", "r\"object of type 'type' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_int():", "no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message = r\"object of type", "ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty = '' assert mc.walk(empty) is", "test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is None def test_walk_ok_string(): string = \"abc\"", "= mc.walk(string) assert len(step) == 1 assert step in string def test_walk_ok_string_list(): the_same", "mc.walk(seq) is None def 
test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is None def", "the_same def test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"} assert mc.walk(seq) in seq.values()", "pytest # type: ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string(): empty = ''", "coding: utf-8 -*- # pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore import mc_flow_sim.mc_flow_sim", "pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_complex(): bad = complex(1, -1) message = r\"object of", "\"a\", 1: \"b\"} assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"}", "# pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore import mc_flow_sim.mc_flow_sim as mc def", "mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object of type 'float' has", "of type 'generator' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(n for n in", "the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) seq = [the_same, the_same, the_same]", "seq = [] assert mc.walk(seq) is None def test_walk_ok_empty_set(): seq = {} assert", "'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415", "\"abc\" step = mc.walk(string) assert len(step) == 1 assert step in string def", "the_same] assert mc.walk(seq) == the_same def test_walk_ok_int_dict(): seq = {0: \"a\", 1: \"b\"}", "seq = {} assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq = dict() assert", "of type 'int' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad", "pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore import mc_flow_sim.mc_flow_sim as mc def test_walk_ok_empty_string():", "def test_walk_nok_wrong_type_object(): bad = object message = 
r\"object of type 'type' has no", "== the_same def test_walk_ok_range(): a_range = range(42) step = mc.walk(a_range) assert step in", "bad = object message = r\"object of type 'type' has no len\\(\\)\" with", "= iter([1, 2, 3]) seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same", "step in a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same = print seq =", "pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_float(): bad = 3.1415 message = r\"object of type", "\"b\"} assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message =", "mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq = {\"a\": \"b\"} message = r\"0\" with", "None assert mc.walk(bad) is None def test_walk_nok_wrong_type_object(): bad = object message = r\"object", "def test_walk_nok_wrong_type_int(): bad = 42 message = r\"object of type 'int' has no", "of type 'complex' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(bad) def test_walk_nok_wrong_type_generator_expression(): message", "def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq =", "= tuple() assert mc.walk(seq) is None def test_walk_ok_string(): string = \"abc\" step =", "message = r\"object of type 'builtin_function_or_method' has no len\\(\\)\" with pytest.raises(TypeError, match=message): mc.walk(print)", "r\"0\" with pytest.raises(KeyError, match=message): mc.walk(seq) def test_walk_nok_wrong_type_none(): bad = None assert mc.walk(bad) is", "dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq) is", "assert len(step) == 1 assert step in string def test_walk_ok_string_list(): the_same = \"a\"", "= dict() assert mc.walk(seq) is None def test_walk_ok_empty_tuple(): seq = tuple() assert mc.walk(seq)", "mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3]) seq = 
[the_same,", "a_range assert isinstance(step, int) def test_walk_ok_function_list(): the_same = print seq = [the_same, the_same,", "seq = [the_same, the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_range(): a_range =", "# -*- coding: utf-8 -*- # pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore", "-*- coding: utf-8 -*- # pylint: disable=missing-docstring,unused-import,reimported import pytest # type: ignore import", "mc.walk(seq) is None def test_walk_ok_empty_dict(): seq = dict() assert mc.walk(seq) is None def", "None def test_walk_ok_empty_set(): seq = {} assert mc.walk(seq) is None def test_walk_ok_empty_dict(): seq", "empty = '' assert mc.walk(empty) is None def test_walk_ok_empty_list(): seq = [] assert", "the_same, the_same] assert mc.walk(seq) == the_same def test_walk_ok_iterator_list(): the_same = iter([1, 2, 3])", "{0: \"a\", 1: \"b\"} assert mc.walk(seq) in seq.values() def test_walk_nok_string_dict(): seq = {\"a\":" ]
[ "print(cameras) print (\"Using camera %s ...\" % cameras[0]) self.cam = pygame.camera.Camera(cameras[0], (640, 480))", "class PygameCameraModule(object): #def __init__(self): def start(self): pygame.camera.init() cameras = pygame.camera.list_cameras() #Camera detected or", "import pygame import pygame.camera class PygameCameraModule(object): #def __init__(self): def start(self): pygame.camera.init() cameras =", "= pygame.camera.list_cameras() #Camera detected or not print(cameras) print (\"Using camera %s ...\" %", "pygame import pygame.camera class PygameCameraModule(object): #def __init__(self): def start(self): pygame.camera.init() cameras = pygame.camera.list_cameras()", "start(self): pygame.camera.init() cameras = pygame.camera.list_cameras() #Camera detected or not print(cameras) print (\"Using camera", "(\"Using camera %s ...\" % cameras[0]) self.cam = pygame.camera.Camera(cameras[0], (640, 480)) self.cam.start() def", "pygame.camera.Camera(cameras[0], (640, 480)) self.cam.start() def capture_image(self, name): img = self.cam.get_image() img = pygame.transform.rotate(img,", "self.cam.start() def capture_image(self, name): img = self.cam.get_image() img = pygame.transform.rotate(img, -90) img =", "#Camera detected or not print(cameras) print (\"Using camera %s ...\" % cameras[0]) self.cam", "(640, 480)) self.cam.start() def capture_image(self, name): img = self.cam.get_image() img = pygame.transform.rotate(img, -90)", "camera %s ...\" % cameras[0]) self.cam = pygame.camera.Camera(cameras[0], (640, 480)) self.cam.start() def capture_image(self,", "class to grab webcam frames using pygame camera import os import pygame import", "self.cam = pygame.camera.Camera(cameras[0], (640, 480)) self.cam.start() def capture_image(self, name): img = self.cam.get_image() img", "% cameras[0]) self.cam = pygame.camera.Camera(cameras[0], (640, 480)) self.cam.start() def capture_image(self, name): img =", "...\" % cameras[0]) self.cam = pygame.camera.Camera(cameras[0], (640, 
# Simple class to grab webcam frames using the pygame camera module.
import os
import pygame
import pygame.camera


class PygameCameraModule(object):
    """Thin wrapper around ``pygame.camera`` for grabbing single webcam frames.

    Usage: call :meth:`start` once, then :meth:`capture_image` per frame.
    """

    def start(self):
        """Initialise the camera subsystem and open the first detected camera.

        Raises:
            RuntimeError: if no camera is detected (instead of the former
                opaque ``IndexError`` from ``cameras[0]``).
        """
        pygame.camera.init()
        cameras = pygame.camera.list_cameras()  # empty list => no camera detected
        print(cameras)
        if not cameras:
            raise RuntimeError('No camera detected by pygame.camera')
        print("Using camera %s ..." % cameras[0])
        self.cam = pygame.camera.Camera(cameras[0], (640, 480))
        self.cam.start()

    def capture_image(self, name):
        """Grab one frame, rotate and upscale it, then save it to *name*.

        ``start()`` must have been called first (it creates ``self.cam``).
        """
        img = self.cam.get_image()
        # -90 degree rotation: the 640x480 capture becomes portrait-oriented,
        # matching the (960, 1280) target size below.
        img = pygame.transform.rotate(img, -90)
        img = pygame.transform.smoothscale(img, (960, 1280))
        pygame.image.save(img, name)
""" A test script to start Cassandra. """
import logging
import os
import sys
import cassandra_interface

# monit_interface lives in the shared lib directory two levels up.
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
import monit_interface


def run():
  """ Bring up the Cassandra service through its monit watch. """
  logging.warning("Starting Cassandra.")
  watch_name = cassandra_interface.CASSANDRA_MONIT_WATCH_NAME
  monit_interface.start(watch_name, is_group=False)
  logging.warning("Done!")


if __name__ == '__main__':
  run()
def _extract_model_spec(args: Args) -> Type[ModelSpec]:
    """Resolve the CLI-supplied model name to its registered ``ModelSpec``.

    Raises:
        ValueError: when ``args.model`` is not a key of ``model_specs``.
    """
    spec = model_specs.get(args.model)
    if spec is not None:
        return spec
    raise ValueError(f'Invalid model name: {args.model}')
class TestPerformanceCommand(Command):
    """CLI command that benchmarks Keras inference time for a model."""

    name: str = 'test_performance'

    @classmethod
    def _add_cli_arguments(cls, parser):
        # Benchmark-specific flags; the base class wires this hook in.
        parser.add_argument(
            '-n', '--num-iterations',
            dest='num_iterations',
            default=1000,
            type=int,
            help='Number of performance measurement iterations'
        )
        parser.add_argument(
            '--iteration-time',
            action='store_true',
            help='Print iteration time'
        )

    @classmethod
    def execute(cls, args: Args):
        """Time ``model.predict`` over ``args.num_iterations`` runs.

        Prints each iteration's duration (scaled by 1e6, i.e. microseconds)
        when ``--iteration-time`` is given, then the cumulative total.
        """
        # perf_counter is monotonic and high-resolution; the wall-clock
        # time() used previously can jump (NTP adjustments), skewing
        # micro-benchmark readings.
        from time import perf_counter

        model_spec = _extract_model_spec(args)
        model = get_model(args.model, model_spec.get_model)
        cumulative_time = 0.0
        for i in range(args.num_iterations):
            start = perf_counter()
            model.predict(x=model_spec.xs, verbose=0)
            iteration_time = perf_counter() - start
            if args.iteration_time:
                print(f'Iteration {i} time: {iteration_time * 1e6}')
            cumulative_time += iteration_time
        print(f'Keras performance for model `{model_spec.name}` : {cumulative_time * 1e6}')
class SaveModelCommand(Command):
    """CLI command that serializes the selected Keras model to disk."""

    name: str = 'save_model'

    @classmethod
    def execute(cls, args: Args):
        """Build/load the model for ``args.model`` and persist it."""
        spec = _extract_model_spec(args)
        keras_model = get_model(args.model, spec.get_model)
        target_path = get_model_file_path(model_name=args.model)
        save_model(keras_model, target_path)
        print(f'Model has been saved to "{target_path}"')
class SaveDataCommand(Command):
    """CLI command that dumps a model's X and Y tensors to data files."""

    name: str = 'save_data'

    @staticmethod
    def _save_tensor_once(tensor, file_path, axis, model_name):
        # Write the tensor only when the file is absent, so repeated runs
        # never clobber previously generated data. `axis` is 'X' or 'Y' and
        # only affects the message text.
        if not os.path.isfile(file_path):
            save_tensor(tensor, file_path)
            print(f'Saved {axis} data for model "{model_name}" to "{file_path}".')
        else:
            print(f'{axis} data for model `{model_name}` already exists at "{file_path}"')

    @classmethod
    def execute(cls, args: Args):
        """Save the spec's xs/ys tensors, skipping files that already exist."""
        model_spec = _extract_model_spec(args)
        # The X and Y branches were copy-paste duplicates; one helper now
        # handles both.
        cls._save_tensor_once(
            model_spec.xs, get_data_file_path(model_spec.name, 'x'), 'X', model_spec.name)
        cls._save_tensor_once(
            model_spec.ys, get_data_file_path(model_spec.name, 'y'), 'Y', model_spec.name)
class TestCorrectnessCommand(Command):
    """CLI command that reports Keras evaluation accuracy for a model."""

    name: str = 'test_correctness'

    @classmethod
    def execute(cls, args: Args):
        """Evaluate the model on its spec's xs/ys and print the accuracy."""
        spec = _extract_model_spec(args)
        keras_model = get_model(args.model, spec.get_model)
        # scores[1] is the metric reported as accuracy below.
        scores = keras_model.evaluate(x=spec.xs, y=spec.ys, verbose=0)
        print(f'Keras accuracy for the model `{spec.name}` is {scores[1]}')
# Registry of every CLI command exposed by this module; consumers iterate
# this list to register sub-parsers (via Command.add_cli_parser).
commands: List[Type[Command]] = [
    SaveModelCommand,
    SaveDataCommand,
    TestCorrectnessCommand,
    TestPerformanceCommand,
]
class Command:
    """Base class for CLI sub-commands.

    Subclasses set ``name``, override :meth:`execute`, and may override
    :meth:`_add_cli_arguments` to register extra flags.
    """

    # Sub-command name as it appears on the command line.
    name: str

    @classmethod
    def add_cli_parser(cls, sub_parsers: SubParsers):
        """Register this command's sub-parser and wire it to ``execute``."""
        sub_parser = sub_parsers.add_parser(name=cls.name)
        sub_parser.set_defaults(func=cls.execute)
        cls._add_cli_arguments(sub_parser)

    @classmethod
    def _add_cli_arguments(cls, parser):
        """Hook for subclasses that need command-specific arguments."""
        pass

    @classmethod
    def execute(cls, args: Args):
        """Run the command; concrete subclasses must override this."""
        raise NotImplementedError()
parser): pass @classmethod def execute(cls, args:", "save_tensor from common import Args, SubParsers, get_model_file_path, get_data_file_path, get_model, ModelSpec from models import", "model `{model_spec.name}` already exists at \"{ys_file_path}\"') class TestCorrectnessCommand(Command): name: str = 'test_correctness' @classmethod", "execute(cls, args: Args): model_spec = _extract_model_spec(args) xs_file_path = get_data_file_path(model_spec.name, 'x') ys_file_path = get_data_file_path(model_spec.name,", "cls._add_cli_arguments(parser) @classmethod def _add_cli_arguments(cls, parser): pass @classmethod def execute(cls, args: Args): raise NotImplementedError()", "for model `{model_spec.name}` already exists at \"{ys_file_path}\"') class TestCorrectnessCommand(Command): name: str = 'test_correctness'", "return model_spec class Command: name: str @classmethod def add_cli_parser(cls, sub_parsers: SubParsers): parser =", "'-n', '--num-iterations', dest='num_iterations', default=1000, type=int, help='Number of performance measurement iterations' ) parser.add_argument( '--iteration-time',", "def execute(cls, args: Args): model_spec = _extract_model_spec(args) model = get_model(args.model, model_spec.get_model) cumulative_time =", "performance for model `{model_spec.name}` : {cumulative_time * 1e6}') commands: List[Type[Command]] = [ SaveModelCommand,", "parser.set_defaults(func=cls.execute) cls._add_cli_arguments(parser) @classmethod def _add_cli_arguments(cls, parser): pass @classmethod def execute(cls, args: Args): raise", "@classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args) model = get_model(args.model, model_spec.get_model) model_file_path", "'y') if not os.path.isfile(xs_file_path): save_tensor(model_spec.xs, xs_file_path) print(f'Saved X data for model \"{model_spec.name}\" to", "NotImplementedError() class SaveModelCommand(Command): name: str = 'save_model' @classmethod def execute(cls, args: Args): model_spec", "import List, Type 
from horn import save_model, save_tensor from common import Args, SubParsers,", "os.path.isfile(xs_file_path): save_tensor(model_spec.xs, xs_file_path) print(f'Saved X data for model \"{model_spec.name}\" to \"{xs_file_path}\".') else: print(f'X", "model_file_path = get_model_file_path(model_name=args.model) save_model(model, model_file_path) print(f'Model has been saved to \"{model_file_path}\"') class SaveDataCommand(Command):", "= get_model(args.model, model_spec.get_model) cumulative_time = 0.0 for i in range(args.num_iterations): start = time()", "str = 'test_performance' @classmethod def _add_cli_arguments(cls, parser): parser.add_argument( '-n', '--num-iterations', dest='num_iterations', default=1000, type=int,", "xs_file_path = get_data_file_path(model_spec.name, 'x') ys_file_path = get_data_file_path(model_spec.name, 'y') if not os.path.isfile(xs_file_path): save_tensor(model_spec.xs, xs_file_path)", "= 'test_performance' @classmethod def _add_cli_arguments(cls, parser): parser.add_argument( '-n', '--num-iterations', dest='num_iterations', default=1000, type=int, help='Number", "time: {iteration_time * 1e6}') cumulative_time += iteration_time print(f'Keras performance for model `{model_spec.name}` :", "\"{model_spec.name}\" to \"{xs_file_path}\".') else: print(f'X data for model `{model_spec.name}` already exists at \"{xs_file_path}\"')", "_extract_model_spec(args: Args) -> Type[ModelSpec]: model_spec = model_specs.get(args.model) if model_spec is None: raise ValueError(f'Invalid", "SaveModelCommand(Command): name: str = 'save_model' @classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args)", "print(f'Model has been saved to \"{model_file_path}\"') class SaveDataCommand(Command): name: str = 'save_data' @classmethod", "@classmethod def _add_cli_arguments(cls, parser): parser.add_argument( '-n', '--num-iterations', dest='num_iterations', default=1000, type=int, help='Number of performance", "'x') ys_file_path = 
get_data_file_path(model_spec.name, 'y') if not os.path.isfile(xs_file_path): save_tensor(model_spec.xs, xs_file_path) print(f'Saved X data", "{i} time: {iteration_time * 1e6}') cumulative_time += iteration_time print(f'Keras performance for model `{model_spec.name}`", "help='Number of performance measurement iterations' ) parser.add_argument( '--iteration-time', action='store_true', help='Print iteration time' )", "from models import model_specs def _extract_model_spec(args: Args) -> Type[ModelSpec]: model_spec = model_specs.get(args.model) if", "save_tensor(model_spec.ys, ys_file_path) print(f'Saved Y data for model \"{model_spec.name}\" to \"{ys_file_path}\".') else: print(f'Y data", "model_specs.get(args.model) if model_spec is None: raise ValueError(f'Invalid model name: {args.model}') return model_spec class", "os.path.isfile(ys_file_path): save_tensor(model_spec.ys, ys_file_path) print(f'Saved Y data for model \"{model_spec.name}\" to \"{ys_file_path}\".') else: print(f'Y", "model `{model_spec.name}` is {scores[1]}') class TestPerformanceCommand(Command): name: str = 'test_performance' @classmethod def _add_cli_arguments(cls,", "verbose=0) iteration_time = time() - start if args.iteration_time: print(f'Iteration {i} time: {iteration_time *", "get_model(args.model, model_spec.get_model) cumulative_time = 0.0 for i in range(args.num_iterations): start = time() model.predict(x=model_spec.xs,", "model \"{model_spec.name}\" to \"{xs_file_path}\".') else: print(f'X data for model `{model_spec.name}` already exists at", "execute(cls, args: Args): model_spec = _extract_model_spec(args) model = get_model(args.model, model_spec.get_model) scores = model.evaluate(x=model_spec.xs,", "print(f'X data for model `{model_spec.name}` already exists at \"{xs_file_path}\"') if not os.path.isfile(ys_file_path): save_tensor(model_spec.ys,", "TestCorrectnessCommand(Command): name: str = 'test_correctness' @classmethod def execute(cls, args: Args): model_spec = 
_extract_model_spec(args)", "str = 'save_data' @classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args) xs_file_path =", "default=1000, type=int, help='Number of performance measurement iterations' ) parser.add_argument( '--iteration-time', action='store_true', help='Print iteration", "scores = model.evaluate(x=model_spec.xs, y=model_spec.ys, verbose=0) print(f'Keras accuracy for the model `{model_spec.name}` is {scores[1]}')", "type=int, help='Number of performance measurement iterations' ) parser.add_argument( '--iteration-time', action='store_true', help='Print iteration time'", "if not os.path.isfile(xs_file_path): save_tensor(model_spec.xs, xs_file_path) print(f'Saved X data for model \"{model_spec.name}\" to \"{xs_file_path}\".')", "`{model_spec.name}` already exists at \"{xs_file_path}\"') if not os.path.isfile(ys_file_path): save_tensor(model_spec.ys, ys_file_path) print(f'Saved Y data", "cumulative_time += iteration_time print(f'Keras performance for model `{model_spec.name}` : {cumulative_time * 1e6}') commands:", "add_cli_parser(cls, sub_parsers: SubParsers): parser = sub_parsers.add_parser(name=cls.name) parser.set_defaults(func=cls.execute) cls._add_cli_arguments(parser) @classmethod def _add_cli_arguments(cls, parser): pass", "get_model_file_path(model_name=args.model) save_model(model, model_file_path) print(f'Model has been saved to \"{model_file_path}\"') class SaveDataCommand(Command): name: str", "has been saved to \"{model_file_path}\"') class SaveDataCommand(Command): name: str = 'save_data' @classmethod def", "print(f'Keras accuracy for the model `{model_spec.name}` is {scores[1]}') class TestPerformanceCommand(Command): name: str =", "typing import List, Type from horn import save_model, save_tensor from common import Args,", "'save_data' @classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args) xs_file_path = get_data_file_path(model_spec.name, 'x')", "model_spec = model_specs.get(args.model) 
if model_spec is None: raise ValueError(f'Invalid model name: {args.model}') return", "str = 'save_model' @classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args) model =", "= model.evaluate(x=model_spec.xs, y=model_spec.ys, verbose=0) print(f'Keras accuracy for the model `{model_spec.name}` is {scores[1]}') class", "args: Args): model_spec = _extract_model_spec(args) model = get_model(args.model, model_spec.get_model) model_file_path = get_model_file_path(model_name=args.model) save_model(model,", "_extract_model_spec(args) xs_file_path = get_data_file_path(model_spec.name, 'x') ys_file_path = get_data_file_path(model_spec.name, 'y') if not os.path.isfile(xs_file_path): save_tensor(model_spec.xs,", "@classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args) model = get_model(args.model, model_spec.get_model) scores", "iteration_time = time() - start if args.iteration_time: print(f'Iteration {i} time: {iteration_time * 1e6}')", "if args.iteration_time: print(f'Iteration {i} time: {iteration_time * 1e6}') cumulative_time += iteration_time print(f'Keras performance", "= 'save_data' @classmethod def execute(cls, args: Args): model_spec = _extract_model_spec(args) xs_file_path = get_data_file_path(model_spec.name,", "to \"{model_file_path}\"') class SaveDataCommand(Command): name: str = 'save_data' @classmethod def execute(cls, args: Args):", "_add_cli_arguments(cls, parser): parser.add_argument( '-n', '--num-iterations', dest='num_iterations', default=1000, type=int, help='Number of performance measurement iterations'", "{args.model}') return model_spec class Command: name: str @classmethod def add_cli_parser(cls, sub_parsers: SubParsers): parser", "name: {args.model}') return model_spec class Command: name: str @classmethod def add_cli_parser(cls, sub_parsers: SubParsers):", "model_spec = _extract_model_spec(args) xs_file_path = get_data_file_path(model_spec.name, 'x') ys_file_path = get_data_file_path(model_spec.name, 
'y') if not", "xs_file_path) print(f'Saved X data for model \"{model_spec.name}\" to \"{xs_file_path}\".') else: print(f'X data for", "_extract_model_spec(args) model = get_model(args.model, model_spec.get_model) scores = model.evaluate(x=model_spec.xs, y=model_spec.ys, verbose=0) print(f'Keras accuracy for", "Args): model_spec = _extract_model_spec(args) model = get_model(args.model, model_spec.get_model) model_file_path = get_model_file_path(model_name=args.model) save_model(model, model_file_path)" ]
[ "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self):", "IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\"", "OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\"", "Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) ->", "\"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script):", "Creates a shallow copy of the current System.Object. 
Returns: A shallow copy of", "pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject):", "def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\"", "def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order:", "\"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom:", "type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self):", "DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self:", "DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def", "def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer])", "whether schema information has been omitted from the payload. 
\"\"\" pass def Dispose(self):", "DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset]", "bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass", "pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "\"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\"", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def ReissueMessage(self,messageId):", "pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda", "\"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass", "PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "(bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool \"\"\"", "Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder])", "\"\"\" GetVersion(self: General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str)", "pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value):", "System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. \"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs:", "PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) ->", "RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) ->", "DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification]", "str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self:", "ZZZ(self): \"\"\"hardcoded/mock 
instance of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType:", "str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs)", "\"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self:", "Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) ->", "\"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id:", "x; see x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\"", "DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass", "to delete the current System.MarshalByRefObject object's identity,which will cause the object to be", "DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink])", "resources. 
disposing: true to release both managed and unmanaged resources; false to release", "-> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass", "SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\"", "pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels):", "ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self:", "General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass", "\"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass def", "current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new", "AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self):", "Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\"", "\"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo):", "\"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass 
def", "LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\"", "Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) ->", "pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine):", "str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) ->", "\"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General) ->", "bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def", "(bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\"", "\"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def", "def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self:", 
"DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass", "\"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def", "ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str)", "\"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def", "pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry):", "\"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self:", "pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types):", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an", "__repr__(self: object) -> str \"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda self: None)", "pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def", "def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: 
LicensePlate) -> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\"", "System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current", "pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers):", "AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int)", "\"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def", "(int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass", "GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path):", "pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\"", "def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the method", "def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher:", "\"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def", "pass def 
GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\"", "\"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "-> List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass", "def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\"", "Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) ->", "Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) ->", "pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass", "DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass", "pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones):", "\"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def 
GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript)", "\"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\"", "DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs]", "copy of the current System.Object. \"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid)", "def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup:", "the binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. 
context: The streaming context.", "\"\"\" GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) ->", "IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject):", "\"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) ->", "\"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def", "List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass", "def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\"", "Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) ->", "str,cacheKey: int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object])", "IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "\"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self:", "CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\"", "\"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str])", "\"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self:", "\"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum)", "def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self:", "\"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass", "def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\"", "PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" 
PrintTestLabel(self: General,labelId: int,testRun: bool)", "pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args):", "-> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) ->", "DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) ->", "DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer]", "def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\"", "StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations)", "pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts):", "class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req):", "the specified System.Delegate to form a new delegate. 
follow: The delegate to combine", "objects that are the arguments to pass to the method represented by the", "bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject:", "-> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\"", "PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger()", "LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\"", "pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "\"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self:", "pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self:", "GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch)", "NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) ->", "def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: 
Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\"", "bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum)", "initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\"", "\"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module):", "\"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args:", "\"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the method represented by", "\"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self:", "DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass", "System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data. 
\"\"\" pass def", "str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom:", "Array[str] \"\"\" pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass", "SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint:", "GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs)", "CountGroup) -> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup)", "remoting scenarios. 
Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted", "CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str)", "Inventory) -> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) ->", "(int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def", "GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str]", "GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self:", "DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification])", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self:", "General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\"", "General,script: str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) ->", "pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\"", "def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: 
Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\"", "OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed from a", "DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self:", "PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo):", "def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self:", "str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str)", "-> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\"", "int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass", "NumberGeneration,rangeId: int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) ->", "GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> 
(bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self:", "signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass", "its clone,which will cause remoting client calls to be routed to the remote", "ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType:", "def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\"", "Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase])", "bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders):", "\"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass def", "ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode:", "ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\"", "\"\"\" def 
ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an", "GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self:", "General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs)", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an", "-> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\"", "pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders):", "ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order:", "\"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound)", "CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str)", "\"\"\" CreateShipperServiceLink(self: General,arg: 
DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg:", "return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self:", "DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType]", "str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def", "\"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def", "invocation list of this System.MulticastDelegate that is equal to the specified delegate. value:", "copy of the current System.Object. \"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs])", "AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs)", "GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self:", "of the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass def", "GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() ->", "self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> 
Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda", "-> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\"", "def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId):", "Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes:", "PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs)", "pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass", "DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str)", "(bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass", "not require arguments. 
Returns: The object returned by the method represented by the", "bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\"", "GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) ->", "-> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\"", "\"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass def", "def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\"", "\"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def", "self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) ->", "\"\"\"hardcoded/mock instance of the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200", "str,userId: 
int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates", "pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def", "-> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\"", "RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer)", "Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines)", "pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg):", "GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType:", "def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\"", "Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: 
str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\"", "Exception,serviceType: Type,key: str) -> str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) ->", "\"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass def", "\"\"\" pass class BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the", "(int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass", "\"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\"", "x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x;", "instance of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject):", "format of the serialized representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. 
context:", "CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey", "GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self:", "GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\"", "\"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self:", "pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey):", "GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) ->", "pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject):", "\"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None)", "IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) 
-> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass", "pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\"", "\"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args:", "MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs)", "event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data. \"\"\" pass def OnRemoveRelation(self,*args):", "WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer)", "CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def", "\"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args):", "Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine]", "General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device]", "see x.__class__.__doc__ for signature \"\"\" pass class IExtendedServiceLocator: # no doc def ZZZ(self):", "* from ..__init__ import * # no functions # classes class AppHost(object): \"\"\"", "int,y: int) \"\"\" pass def 
SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\"", "DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass", "GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs)", "CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass", "DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass", "\"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary)", "Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging:", "pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\"", "\"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license):", "GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: 
General,hashCode:", "def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self:", "pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def", "pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self:", "str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs])", "\"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\"", "deserialization in remoting scenarios. 
Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate):", "General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\"", "pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg:", "\"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self:", "Inbound) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders)", "def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "-> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda", "-> Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def", "GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors)", "class\"\"\" def RestartGooglePubSubServices(self): \"\"\" 
RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem)", "DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass", "ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange])", "def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\"", "General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self):", "\"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\"", "pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator):", "\"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def", "def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id:", "RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None 
Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\"", "ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs])", "\"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def", "List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def", "\"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass", "def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self:", "-> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\"", "pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings:", "GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str)", "DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting]", "-> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" 
GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\"", "to its clone,which will cause remoting client calls to be routed to the", "\"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def", "BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject):", "pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject):", "NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert:", "Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns", "GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self:", "GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self:", "Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\"", "class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "Outbound,dfObject: 
DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) ->", "pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data. \"\"\" pass def OnRemoveRelation(self,*args): \"\"\"", "\"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg:", "SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) ->", "pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing)", "\"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid:", "-> (bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\"", "DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self:", "of the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity:", "to the specified delegate. 
value: The delegate to search for in the invocation", "Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings()", "GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\"", "\"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate", "GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self:", "TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) ->", "-> bool \"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str \"\"\"", "\"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def", "str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) ->", "(int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def", "GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId:", "see x.__class__.__doc__ for signature \"\"\" pass 
@staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider:", "TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\"", "CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink])", "new System.Delegate without value in its invocation list; otherwise,this instance with its original", "pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks):", "pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy", "(bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass", "\"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def", "GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) 
-> (int,ColliPresets)", "\"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass", "pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self):", "RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock", "pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def", "object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) ->", "pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General)", "\"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str)", "-> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass", "(bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch):", "DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext)", "GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) ->", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\"", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls:", "def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\"", "pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\"", "pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags):", "class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\"", "-> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\"", "-> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self):", "\"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) ->", "GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass 
def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId:", "\"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey)", "object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def", "\"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def", "\"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone)", "def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey):", "Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Mailer()", "pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args):", "DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int]", "pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self):", "Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: 
DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey:", "None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self:", "-> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\"", "GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self:", "\"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\"", "AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User)", "pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject):", "-> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines", "ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass", "Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: 
Customers) ->", "Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args):", "None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda", "\"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass", "Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass", "DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass", "BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\"", "\"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General)", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs)", "GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText:", "General,tag: str) -> str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users)", "PurchaseOrderArgs) -> 
(int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) ->", "shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current", "or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. \"\"\" pass", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) ->", "CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass", "GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General)", "str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) ->", "Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject:", "int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self):", "Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: 
str,warehouseLocationTo:", "pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey):", "NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\"", "\"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\" pass", "-> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) ->", "-> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages)", "GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General)", "(int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def", "DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass", "\"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] 
\"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg:", "ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self:", "\"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound)", "(int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass", "WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass", "Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\"", "instance of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\"", "General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts)", "GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General)", "-> (int,Warehouses) \"\"\" pass def 
GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\"", "Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) ->", "IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod def WrapException(ex): \"\"\"", "ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def", "-> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass", "GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) ->", "\"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass def", "object) -> object Creates a shallow copy of the current System.Object. Returns: A", "Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) ->", "the specified System.Data.DataSet property is about to change. 
name: The name of the", "\"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost)", "no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns", "\"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context:", "by the delegate. \"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str", "__new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject):", "\"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) ->", "SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds:", "General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent)", "to the method represented by the current delegate.-or- null,if the method represented by", "DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]]", "DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\"", "\"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def", "the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes", "General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) ->", "GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self:", "CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) ->", "RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self:", "GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\"", "\"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError]", "pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass", "\"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy:", "-> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass", "str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def 
GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines)", "\"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def", "\"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self):", "\"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args:", "RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key:", "-> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool", "pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass", "\"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode:", "def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\"", "CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings:", "\"\"\" 
pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def", "method represented by the current delegate.-or- null,if the method represented by the current", "DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass", "DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask]", "instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\"", "Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs)", "None) \"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda", "\"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint:", "DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int)", "-> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def", 
"GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id:", "Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet) ->", "of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds):", "GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate)", "ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel]", "\"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool", "\"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable", "-> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) ->", "object) -> str \"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\" pass", 
"\"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch:", "GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General)", "def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\"", "GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId:", "DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self:", "GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General)", "\"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5", "def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\"", "-> (bool,ReplenishmentOrder) \"\"\" pass def 
GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\"", "SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName:", "Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\"", "Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int)", "DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self:", "PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject):", "pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset):", "change. name: The name of the property that is about to change. 
\"\"\"", "BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\"", "pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line):", "-> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems)", "-> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference:", "CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange])", "pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\"", "\"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. 
pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains", "def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\"", "DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count]", "AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) ->", "-> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\"", "RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass", "ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat',", "\"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str", "RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\"", "\"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str", "\"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: 
NotificationCenter,notificationId: int) \"\"\" pass", "\"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId:", "instance of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass", "-> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args):", "A delegate that is the new root of the System.MulticastDelegate invocation list. \"\"\"", "DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid)", "\"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId:", "\"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def", "\"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self:", "(bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass", "str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule", "(int,Batches,str) 
\"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str)", "-> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\"", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object \"\"\" pass def IsProfilerRunning(self):", "def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\"", "GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self:", "General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) ->", "managed and unmanaged resources; false to release only unmanaged resources. \"\"\" pass def", "str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs]", "a new identity when it is marshaled across a remoting boundary. 
A value", "DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs]", "SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self:", "\"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection", "\"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings)", "@staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager:", "DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\"", "SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) ->", "pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass", "OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass", "class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def CreateContainer(self): \"\"\"", "LogoutClient(self: General) \"\"\" pass def 
LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args):", "CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self:", "def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\"", "General,arg: AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) ->", "-> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates", "GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) ->", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls:", "ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self:", "str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) ->", "\"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) ->", "pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self:", "-> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: 
Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\"", "by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. disposing: true to release", "'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General)", "IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args):", "pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args:", "RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def", "def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\"", "'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class", "str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode:", "pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint):", "GetPrinterRules(self,args): 
\"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self:", "\"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue)", "bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass", "\"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args:", "\"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\"", "DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is removed from a System.Data.DataTable. relation:", "GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs])", "-> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\"", "(int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) ->", "IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self:", "GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def 
GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) ->", "of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's", "class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\"", "\"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def", "-> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\"", "str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId:", "\"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def", "GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self:", "\"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General)", "-> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> 
(int,LocationItems) \"\"\"", "str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) ->", "pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy):", "pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass def", "GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self:", "def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg):", "pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items):", "List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs)", "returned by the method represented by the delegate. \"\"\" pass def EndInvoke(self,result): \"\"\"", "Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None", "\"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username:", "usually appropriate. 
true to copy the current System.MarshalByRefObject object's identity to its clone,which", "-> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet)", "GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self:", "def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\"", "Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode:", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager:", "def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass", "GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\"", "str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup:", "ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def 
RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self:", "\"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\"", "PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations]", "\"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId:", "import * # no functions # classes class AppHost(object): \"\"\" AppHost() \"\"\" def", "\"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs)", "-> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications)", "def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self:", "(int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass", "Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the method represented by the current", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) ->", "bool \"\"\" pass def 
AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\"", "by the current delegate.-or- null,if the method represented by the current delegate does", "DataRelation) Occurs when a System.Data.DataRelation object is removed from a System.Data.DataTable. relation: The", "Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an", "CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self:", "x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str)", "GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self:", "SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) ->", "DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self:", "@staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: 
type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda", "-> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\"", "\"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def", "\"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: # no doc def ZZZ(self):", "-> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings)", "Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self):", "-> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\"", "str) \"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod def", "pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass def", "\"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg:", "-> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" 
GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\"", "-> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\"", "AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\"", "\"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey:", "pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject):", "(int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def", "pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass", "instance of the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "x; see x.__class__.__doc__ for signature \"\"\" pass class IExtendedServiceLocator: # no doc def", "General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) ->", "\"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\"", "System.Object. Returns: A shallow copy of the current System.Object. 
\"\"\" pass def PreCreatePreReceipt(self,dfObject):", "GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self:", "IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator()", "def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\"", "\"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) ->", "pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\"", "EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\"", "None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda", "\"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def", "Returns: A shallow copy of the current System.Object. 
\"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\"", "GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self:", "def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self): \"\"\"", "GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int)", "RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch", "PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self:", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def MemberwiseClone(self,*args):", "(int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def", "NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def", "\"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def 
ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject:", "-> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type:", "-> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\"", "def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type):", "GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList)", "General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) ->", "\"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp:", "Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) ->", "ExecuteScript(self: General,script: str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int)", "class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\"", "\"\"\" 
GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter:", "\"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args:", "\"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue)", "UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args):", "str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) ->", "pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\"", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\"", "def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self:", "InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: 
Outbound,shipment:", "\"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self:", "PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass def", "NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def", "that is passed during deserialization of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration indicating", "IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "\"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str)", "-> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass", "str,scope: Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) ->", "pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def 
StartRemotePublishingInboundListener(self): \"\"\"", "pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage):", "bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\"", "def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def", "-> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\"", "ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server'", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer:", "GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self:", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\"", "Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" 
pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders)", "(int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) ->", "int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath):", "AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\"", "\"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self:", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass class IExtendedServiceLocator:", "'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\"", "def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None):", "CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\"", "ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment):", "General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label:", "def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self:", "ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat',", "CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self:", "Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" 
CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items:", "list. Returns: If value is found in the invocation list for this instance,then", "General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations)", "\"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks):", "\"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass def", "-> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass", "of the System.MulticastDelegate invocation list. \"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object])", "-> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\"", "def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\"", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "from a System.Data.DataSet. table: The System.Data.DataTable being removed. 
\"\"\" pass def RaisePropertyChanging(self,*args): \"\"\"", "Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode:", "\"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self:", "pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses):", "Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int)", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls:", "Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) ->", "def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self):", "Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None 
StartupSqlConnRetryAttempts=3", "pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\"", "unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. disposing:", "BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) ->", "str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\"", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId:", "-> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def", "CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User])", "PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) ->", "represents a DataSet serialized in its binary format,false otherwise. 
\"\"\" pass def OnPropertyChanging(self,*args):", "Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files", "pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets):", "Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self:", "BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass", "relation: The System.Data.DataRelation being removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable)", "IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass", "\"\"\" Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException)", "int) -> (int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers)", "def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\"", "-> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\"", "Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\"", "\"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self:", "Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self:", "GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self:", "-> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: 
GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates)", "PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\"", "GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object", "def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\"", "\"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self:", "represented by the current delegate. 
args: An array of objects that are the", "\"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def", "of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def", "pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries):", "pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message:", "Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass", "DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self:", "EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) ->", "Removes an element from the invocation list of this System.MulticastDelegate that is equal", "class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "\"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self):", "Occurs when a System.Data.DataTable is removed from a System.Data.DataSet. 
table: The System.Data.DataTable being", "pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages):", "General,zoneId: int) -> (int,Users) \"\"\" pass def GetVersion(self): \"\"\" GetVersion(self: General) -> str", "pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers):", "bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\"", "def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) ->", "DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\"", "IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns", "\"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) ->", "with the specified System.Delegate to form a new delegate. follow: The delegate to", "\"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes", "for signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def", "\"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\"", "(int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass", "def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\"", "x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "\"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def", "\"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def", "Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode:", "\"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self:", "\"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def 
SetZoneRightsOfZone(self,zoneId,zoneRights):", "(int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass", "CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self:", "NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationSummary() instance=ZZZ()", "this System.Delegate with the specified System.Delegate to form a new delegate. follow: The", "\"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) class Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self):", "Returns: A shallow copy of the current System.Object. \"\"\" pass def SaveConfiguration(self,model): \"\"\"", "DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting])", "CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self:", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args):", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\"", "delegate. 
\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass", "-> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\"", "\"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\" pass", "\"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self:", "static method represented by the current System.MulticastDelegate. Returns: A static method represented by", "\"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\"", "\"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def", "Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass", "-> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\"", "PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs]", "\"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def", "General,resourseSet: str) -> (int,Translations) \"\"\" pass def 
GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) ->", "GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str)", "important from System.Collections.Generic import * from ..__init__ import * # no functions #", "(int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def", "GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str)", "\"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass def", "pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels):", "CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs])", "\"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def", "\"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\"", "GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) 
-> (int,PagedList[PreReceiptLine])", "-> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult", "Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass def", "CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\"", "-> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\"", "\"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def", "GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey)", "\"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg:", "-> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\"", "\"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def", "def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" 
GetAllExecutionSchedules(self:", "DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass", "GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) ->", "pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg):", "FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self:", "pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self:", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda", "StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self:", "DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass", "an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int", "Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass", "DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self:", "AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId):", "pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject):", "RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject:", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue):", "(int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def", "\"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def", "AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str)", "\"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key:", "\"\"\" GetErpName(self: General) -> str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) ->", "return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self:", "def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\"", "def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\"", "\"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\"", "PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self:", "the object 
to be assigned a new identity when it is marshaled across", "(bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass", "str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) ->", "General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass", "\"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "\"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def", "\"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str)", "AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args):", "GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self:", "DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int])", "self: None) class IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object 
\"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self:", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def __init__(self,*args):", "GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args:", "def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\"", "see x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging)", "def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def SaveSetting(self,memberName,value):", "GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str)", "AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self:", "-> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass", "IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self:", "CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\" pass", "(bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) 
-> (int,Zones) \"\"\" pass", "def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup:", "DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs]", "-> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\"", "SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint:", "\"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def", "pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber):", "GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) ->", "self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda", "str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass", "represented by the current System.MulticastDelegate. 
Returns: A static method represented by the current", "Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label:", "\"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) ->", "-> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\"", "GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str)", "CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) ->", "\"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass def", "def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\"", "Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) ->", "class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass", "(int,Users) \"\"\" pass def GetVersion(self): \"\"\" GetVersion(self: General) -> 
str \"\"\" pass def", "GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings)", "None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self:", "ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message:", "-> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\"", "pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\" pass def", "\"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file:", "-> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass", "class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\"", "str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass", "of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: 
NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup):", "Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Messaging()", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "General,tag: str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) ->", "None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v:", "None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self:", "Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args:", "pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args):", "\"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\"", "pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings):", "def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\"", "def 
GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\"", "disposing: true to release both managed and unmanaged resources; false to release only", "DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines", "def __new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args):", "\"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass def", "-> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool", "GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self:", "pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones):", "int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass", "\"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\"", "pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\"", "bool \"\"\" pass def 
GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\"", "Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\"", "GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\"", "General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self:", "pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\"", "\"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo):", "pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags):", "return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None", "def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId:", "new delegate. follow: The delegate to combine with this delegate. 
Returns: A delegate", "MulticastDelegate,value: Delegate) -> Delegate Removes an element from the invocation list of this", "pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg):", "def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class", "Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class", "CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self:", "\"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy:", "\"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\"", "DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) ->", "(int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def", "Inbound,args: PrepareInboundReceiveLinesArgs) -> 
CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions:", "def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\"", "def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\"", "\"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General)", "int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments)", "class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc'", "def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\"", "str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str", "PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" 
PickItemIdInBatch(self:", "def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\"", "GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture:", "def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType):", "(int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass", "ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self:", "General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) ->", "System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. 
disposing: true to release both managed", "Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId:", "\"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\" pass UserName=property(lambda self:", "GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int)", "DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders]", "-> int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def", "GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey)", "instance of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "(bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance", "def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\"", "pass UserName=property(lambda self: 
object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) -> str", "def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope):", "of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass", "the invocation list. Returns: If value is found in the invocation list for", "\"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object", "bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass", "of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "General,name: str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) ->", "str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) ->", "DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent]", "-> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\"", "binary format,false otherwise. 
\"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the", "DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) ->", "SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self:", "RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) ->", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an", "GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\"", "\"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args:", "def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\"", "list. 
\"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes", "\"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\"", "def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\"", "DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\"", "SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "\"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id:", "def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\"", "General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents)", "\"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def ProcessDirectOrder(self,args):", "\"\"\" pass def 
PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\"", "def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid):", "CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey])", "def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\"", "-> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\"", "RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\"", "None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object:", "def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data:", "GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId:", "AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\" pass def 
AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\"", "Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) ->", "self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda", "class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "\"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def", "\"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass def", "DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass", "pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args):", "search for in the invocation list. 
Returns: If value is found in the", "def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\"", "-> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass", "of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\"", "\"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def", "pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches):", "StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General)", "def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\"", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate)", "pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def 
DeletePreReceipLines(self,dfObject):", "PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs)", "Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass", "pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\"", "pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts):", "\"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\"", "\"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass", "GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str)", "__new__(cls: type,object: object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager:", "its invocation list; otherwise,this instance with its original invocation list. 
\"\"\" pass def", "-> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass", "def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\"", "\"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label):", "General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes])", "-> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\"", "GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method represented by the current System.MulticastDelegate.", "GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum)", "Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) ->", "General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\"", "General) -> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self:", "PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid)", "\"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool 
IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def", "\"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int", "Messaging,messageId: Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs)", "CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) ->", "BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink:", "\"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\"", "def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None", "class Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" 
CreateReplenishmentOrderLines(self: Inventory,lines:", "(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000", "def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def", "\"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) ->", "-> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\"", "int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass", "DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId):", "pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag:", 
"CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self:", "DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int)", "def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations):", "IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def __init__(self,*args):", "\"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity)", "\"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId:", "pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode):", "GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int)", "\"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType):", "\"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: 
CacheKey,itemCode: str,itemId: str) -> bool \"\"\"", "IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\"", "DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass", "GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs)", "GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self:", "\"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self):", "Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines)", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\"", "\"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self:", "ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs])", "pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: 
General,lock: ErpLock) -> int \"\"\" pass def AddTaskAutoDisposeTask(self):", "StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool", "ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass def", "MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self:", "\"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def", "pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def", "Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) ->", "\"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\" pass def", "CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def", "pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def 
GetPreReceiptSummaries(self,purchaseOrdernumber):", "FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\"", "GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId:", "PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self:", "no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns", "bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def", "Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\"", "def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch:", "\"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod", "is found in the invocation list for this instance,then a new System.Delegate without", "in remoting scenarios. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. Returns:", "GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args:", "PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass", "-> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation", "\"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\"", "str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ]", "(int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def", "@staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self:", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general:", "pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass def 
__reduce_ex__(self,*args):", "DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams)", "instance that is passed during deserialization of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration", "DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches)", "General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label:", "in its binary format,false otherwise. \"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs)", "\"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject:", "\"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId):", "def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\"", "int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device)", "None,lambda self: None) class IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: 
IStockManager,messaging: Messaging)", "-> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings)", "def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\"", "pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def", "General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str)", "pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass def", "def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self:", "pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass def", "arguments to pass to the method represented by the current delegate.-or- null,if the", "Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def", "a shallow copy of the current System.MarshalByRefObject object. 
cloneIdentity: false to delete the", "-> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\"", "def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line:", "ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId',", "def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\"", "GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) ->", "GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging:", "-> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> 
(int,ShipperServiceLinks) \"\"\" pass", "int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine]", "def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\"", "pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName):", "(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt'", "GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId:", "\"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self):", "GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs)", "the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: 
OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult", "-> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\" pass", "General) -> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\"", "GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) ->", "# no doc # no important from System.Collections.Generic import * from ..__init__ import", "str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue]", "Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\"", "\"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General)", "Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey)", "def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\"", "\"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue)", "\"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> 
(int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args:", "\"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==>", "ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self:", "RemotePublishing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "\"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str)", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "instance of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) ->", "\"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key:", "__new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v:", "__init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__", "self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: # no doc", "\"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\"", "pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress):", "pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "Delegate Removes an element from the invocation list of this System.MulticastDelegate that is", "DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass", "SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug'", "int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\"", "def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\"", "\"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass", 
"Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity:", "Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) ->", "Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def", "\"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DataSet() instance=ZZZ()", "pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass", "\"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self:", "str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo:", "System.Runtime.Serialization.StreamingContext object. 
Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in", "\"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args:", "CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\"", "str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\"", "(bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass", "\"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def", "CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool", "UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str)", "-> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\"", "DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate:", "IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> 
Delegate Combines this", "-> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings)", "def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\"", "SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) ->", "\"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg: str)", "-> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult", "it is marshaled across a remoting boundary. 
A value of false is usually", "pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args):", "GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey:", "GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self:", "None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self:", "Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs]", "\"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args:", "CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\" pass", "GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning)", "instance with its original invocation list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "\"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass", "CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with the specified", "def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass", "None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self:", "AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user):", "GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams:", "DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine]", "def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings():", "the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args):", "\"\"\" pass def 
AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\"", "(int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\"", "\"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def", "def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\"", "DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self:", "-> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\"", "\"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General)", "'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "str,value: object) \"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def", "bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def", 
"GetErpSettings(self: General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable", "EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self:", "\"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def", "\"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\"", "The delegate to combine with this delegate. Returns: A delegate that is the", "pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services):", "\"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) ->", "GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext)", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda", "Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\"", "\"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> 
(int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey)", "str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool", "\"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId):", "see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass class", "pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General)", "RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass", "\"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper() instance=ZZZ()", "\"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self:", "-> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\"", "General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones)", "\"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" 
pass def", "DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag])", "def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot():", "\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is", "def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds):", "bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass", "AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self):", "str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def", "Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams)", "(bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def", "CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: 
str) -> CacheKey", "\"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter:", "\"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def", "element from the invocation list of this System.MulticastDelegate that is equal to the", "Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\"", "def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\"", "of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) ->", "General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) ->", "(int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass", "type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda 
self: object(),lambda self,v: None,lambda self: None) class", "List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines:", "GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\"", "(int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\"", "GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses)", "def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\"", "General,id: int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) ->", "-> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def", "GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams:", "int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\" pass", "-> 
DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\"", "DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass", "bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject):", "when a System.Data.DataRelation object is removed from a System.Data.DataTable. relation: The System.Data.DataRelation being", "GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args:", "-> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) ->", "GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) ->", "GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self:", "GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str)", "def DeleteModule(self,arg): \"\"\" 
DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\"", "Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) ->", "ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications)", "\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def", "GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str)", "def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\"", "\"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def", "when a System.Data.DataTable is removed from a System.Data.DataSet. 
table: The System.Data.DataTable being removed.", "DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass", "\"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args:", "str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool", "SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks',", "PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel)", "signature \"\"\" pass class IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of", "GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs)", "representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. 
Returns:", "CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self:", "AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str)", "GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self:", "SerializationInfo,context: StreamingContext) Deserializes the table data from the binary or XML stream. info:", "-> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\"", "RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass", "GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self:", "RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs)", "DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException:", "self: None) \"\"\"Get: 
CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method:", "Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg:", "\"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg:", "\"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def", "General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) ->", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda", "\"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) ->", "General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) ->", "that is the new root of the System.MulticastDelegate invocation list. 
\"\"\" pass def", "GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks)", "CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\"", "GetSettings(self: General) -> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable", "pass def LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "the list of event handlers that are attached to this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda", "ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\"", "General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\"", "DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) ->", "the delegate. 
\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\"", "General,itemCode: str,itemId: str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds:", "self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: #", "\"\"\"hardcoded/mock instance of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "str) -> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str)", "BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def", "def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v:", "pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings", "CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self:", "str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode:", "MessageBodyDecodeAs,messageBody: str) \"\"\" pass def 
StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\"", "] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with", "DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self:", "\"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self):", "pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\"", "-> CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\"", "pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg):", "\"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) ->", "\"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self:", "\"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\"", "pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool 
\"\"\" pass def GenerateReplenishmentOrders(self,args):", "str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom:", "DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine]", "OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs])", "def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\"", "OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is removed from a System.Data.DataTable.", "def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from the", "GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self:", "PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\"", "pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def 
AuthenticateUserForFirstZone(self,remId): \"\"\"", "def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self:", "(int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass", "bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool", "UserName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\"", "Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) ->", "the current System.Object. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__", "def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "-> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) ->", "of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass", "ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\"", "GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str])", "List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass", "pass def GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\" pass def GetErpSettings(self): \"\"\"", "GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General)", "GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications", "General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: 
DataFlowObject[ZoneScript]) ->", "BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\"", "DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\"", "\"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def", "Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup", "-> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location)", "pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def", "GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self:", "\"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def", "System.Delegate to form a new delegate. 
follow: The delegate to combine with this", "ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass", "instance of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\"", "ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator)", "current delegate does not require arguments. Returns: The object returned by the method", "\"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def", "BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a", "AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) ->", "\"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\"", "ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject):", "General,id: int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) ->", "General,arg: DataFlowObject[User]) -> 
DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) ->", "SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\"", "IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)", "in the invocation list for this instance,then a new System.Delegate without value in", "of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type)", "MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\"", "Zone,user: User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool", "-> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\"", "\"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def", "GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) ->", "Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object", "-> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass", "SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
reader: The System.Xml.XmlReader instance that is", "def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "\"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "str) -> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase)", "GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode:", "-> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int)", "\"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass", "def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\"", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def", "General,arg: ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) 
->", "-> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass", "(int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\"", "str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) ->", "pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self:", "\"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str)", "Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data. 
\"\"\"", "def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\"", "-> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass", "\"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey", "the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def RestartGooglePubSubServices(self):", "self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda", "General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\"", "bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass", "\"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\"", "def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key):", "pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class PyLogger(object): #", "CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" 
CreateReplenishmentOrder(self: Inventory,order:", "-> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout)", "class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def", "def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass def", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self:", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager:", "-> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks)", "DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript]", "\"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\"", "(DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass", "of the class\"\"\" return Printing() 
instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines)", "DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs]", "pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout):", "def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse])", "Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) ->", "__enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back:", "\"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None)", "pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe:", "CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup])", "def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def 
GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\"", "\"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args:", "str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo:", "(int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass", "\"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) ->", "'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ]", "GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str)", "data. \"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation", "System.Data.DataRelation object is removed from a System.Data.DataTable. relation: The System.Data.DataRelation being removed. 
\"\"\"", "\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args):", "\"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) ->", "def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\"", "-> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders)", "Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) ->", "object(),lambda self,v: None,lambda self: None) class IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock", "BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self): \"\"\"", "AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs)", "\"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg:", "def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self): \"\"\"", "GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: 
Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self:", "true to release both managed and unmanaged resources; false to release only unmanaged", "ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str)", "DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass", "-> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\"", "\"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\"", "pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg):", "\"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self:", "General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> 
(int,ShipperServiceLinks)", "\"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken:", "str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\"", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an", "ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) ->", "\"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject:", "shallow copy of the current System.Object. \"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model:", "GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self:", "-> bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass", "\"\"\"hardcoded/returns an instance of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer", "self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda", "bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass", "ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def 
ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self:", "UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass", "Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers)", "str,printingOptions: PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label:", "GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) ->", "DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass", "-> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\"", "None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\"", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\"", "def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) ->", "StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine]", "def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label:", "GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self:", "-> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass", "DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int)", "\"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject):", "def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self:", "\"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: 
GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def", "\"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def", "from the invocation list of this System.MulticastDelegate that is equal to the specified", "pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines):", "GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) ->", "of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def", "\"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def", "\"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def", "None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: # no", "pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\"", "DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int", "\"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: 
General,itemCode: str) -> Array[Byte] \"\"\" pass def", "def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self:", "def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete:", "GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\"", "pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def", "GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int)", "-> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure)", "-> (int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\"", "def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self:", "to change. 
name: The name of the property that is about to change.", "IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self:", "NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\"", "of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self:", "-> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\"", "StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning)", "General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) ->", "DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects", "\"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass 
def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\"", "ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference',", "@staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg:", "(int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass def", "\"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\"", "\"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\"", "str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) ->", "def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\"", "InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup)", "\"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def", "str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo)", "def 
GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod", "bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\"", "GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs)", "\"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self:", "to release only unmanaged resources. \"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) ->", "def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId:", "\"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General)", "-> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\"", "def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs):", "true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary format,false", "\"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass 
def", "InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\"", "\"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def", "SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def", "bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\"", "instance of the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool \"\"\" pass def OnPythonEngineBooted(self):", "CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items)", "instance of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\"", "appropriate. 
true to copy the current System.MarshalByRefObject object's identity to its clone,which will", "self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda", "\"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\"", "\"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\" pass", "def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self:", "pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def", "ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus)", "\"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\"", "PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self:", "\"\"\" 
GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General)", "(int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def", "GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self:", "instance of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "-> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass", "instance of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def", "Outbound) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches)", "an instance of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass", "with this delegate. 
Returns: A delegate that is the new root of the", "pass def GetVersion(self): \"\"\" GetVersion(self: General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\"", "OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate)", "CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems", "class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\"", "class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "\"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass def", "-> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool", "str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom:", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self: object)", "MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "-> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> 
PagedList[PrintJobAuditLogEntry]", "(bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\"", "\"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\" pass def GetErpSettings(self):", "\"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) ->", "GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self:", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general:", "\"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v:", "XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table", "GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) ->", "GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs)", "pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg):", "type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass 
CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None)", "bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass", "def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\"", "@staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject):", "BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs)", "None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v:", "def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args):", "\"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) ->", "\"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def 
GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self:", "DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\"", "str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult)", "def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\"", "no functions # classes class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass", "remoting client calls to be routed to the remote server object. 
Returns: A", "WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting)", "def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\"", "\"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject:", "__new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda", "GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) ->", "def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds):", "\"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject:", "PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\"", "GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args:", "Outbound) 
-> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings", "str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass", "General) -> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) ->", "def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self:", "\"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass", "IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self:", "class PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock):", "GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs)", "bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\"", "GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: 
Outbound,args: GetCustomersArgs)", "DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self:", "def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\"", "the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type) ->", "GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) ->", "DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the method represented by the", "Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass def", "A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates", "(int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass", "pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors):", "the current System.Object. 
\"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass", "-> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def", "\"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject:", "CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog)", "Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\"", "def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\"", "str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) ->", "(int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass def", "pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self):", "\"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "(int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass", "DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self:", "pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\"", "InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool", "General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\"", "pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda", "CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity:", "table data from the binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. 
context:", "\"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args:", "pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def", "bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass", "\"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General)", "__getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "\"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self:", "Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups)", "GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\"", "\"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\"", "CacheKey \"\"\" pass def 
PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str)", "General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) ->", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self:", "\"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan:", "object) \"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self):", "CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self:", "General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses)", "\"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def", "-> str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\" pass", "CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid])", 
"GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) ->", "DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event", "-> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings)", "\"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass", "def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\"", "GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args:", "GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode:", "bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool)", "-> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\"", "str) -> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def", "\"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def 
GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode:", "pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken):", "(int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\"", "-> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass", "\"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args):", "bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def", "(bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def", "GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self:", "GetSessions(self: General) -> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings", "pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type):", "Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: 
Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def", "\"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg:", "None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self:", "def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\"", "\"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck:", "GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self:", "pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines):", "\"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def", "\"\"\" pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def", "DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> 
ExecuteMessageHandlerResult \"\"\" pass", "pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def", "str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def", "GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable", "pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key):", "\"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\"", "context. \"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass", "MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self:", "-> bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass", "GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args:", "CacheKey \"\"\" pass def 
PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey", "def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args):", "Exception,serviceType: Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key:", "\"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy:", "\"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\"", "\"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass", "@staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self:", "DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass", "resources; false to release only unmanaged resources. 
\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self:", "pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args):", "\"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key):", "str,fromDir: str,toDir: str) -> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\"", "pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def GetFileTypes(self):", "\"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context:", "Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args:", "\"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) ->", "ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs)", "def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\"", "def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\"", "def RemoveWarehouseTransfer(self,key): \"\"\" 
RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\"", "NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns", "str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment]", "DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass", "\"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder):", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging):", "pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications):", "UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase)", "int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) ->", "\"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self:", "def LogoutUser(self): \"\"\" LogoutUser(self: 
General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "\"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg:", "Outbound,id: str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\"", "@staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error',", "CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers:", "DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self:", "CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool", "Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) ->", "str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\"", "General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def 
LoadCache(self): \"\"\" LoadCache(self:", "\"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args:", "Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\"", "DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args:", "Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) ->", "\"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args:", "the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddPrintJob(self,args):", "RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey:", "\"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\"", "DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: 
General,memberName: str,value: object) \"\"\"", "Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. reader: The System.Xml.XmlReader instance that is passed", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self:", "DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass", "pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def", "CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self:", "\"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\"", "ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable)", "def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass", "StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations):", "-> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\"", 
"DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg):", "UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "-> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\"", "(int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def", "CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) ->", "@staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\" def", "doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an", "(int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def", "GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self:", "\"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\"", "\"\"\" 
GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId:", "\"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an", "-> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType:", "General,warehouseCode: str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting)", "DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup]", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager:", "\"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject:", "Inventory,warehouseToCode: str) -> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) ->", "-> (int,ReplenishmentOrderLines) \"\"\" pass def 
GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\"", "\"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg:", "\"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg):", "Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str)", "Returns: A shallow copy of the current System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\"", "Returns: A shallow copy of the current System.Object. 
\"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\"", "ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self:", "GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args:", "str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) ->", "ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def __enter__(self,*args): \"\"\"", "class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self:", "pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey):", "DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass", "\"\"\" __new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\"", "IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def __init__(self,*args): \"\"\"", "ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 
ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval',", "def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\"", "pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args):", "int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter):", "def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\"", "\"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args:", "\"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def", "ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\"", "General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition)", "GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" 
InitializeLifetimeService(self: Messaging) -> object", "NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line:", "\"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject:", "pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\"", "pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId):", "(int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\"", "pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass", "\"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags):", "str,warehouseLocationCode: str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" 
GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str)", "for signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass", "of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\"", "Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) ->", "'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "\"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning)", "pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\"", "pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self:", "-> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\"", "\"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" 
UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs])", "pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch):", "ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\"", "\"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General)", "\"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def", "DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) ->", "def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\"", "-> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self:", "pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch):", "Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): 
\"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) ->", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self:", "GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self:", "int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) ->", "General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone)", "an instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self:", "GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self:", "-> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass", "GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\"", "str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) ->", "\"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self:", "pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the list of event", "(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' 
RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc'", "pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self:", "pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an element from", "OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self:", "SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args:", "\"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\"", "\"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self):", "-> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass", "\"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg):", 
"GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str)", "\"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def", "GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs)", "int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object \"\"\"", "-> Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass", "\"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages:", "Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs)", "the current System.MarshalByRefObject object. 
cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which", "def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\"", "pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject):", "array of objects that are the arguments to pass to the method represented", "GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str \"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg:", "def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device)", "pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode):", "OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions", "\"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def", "GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId:", "(int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\" pass def", 
"pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self):", "\"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound)", "GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str)", "NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent", "GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\"", "self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None)", "def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v:", "\"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass", "deserialization in remoting scenarios. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios.", "def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def", "str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes:", "DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a", "ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\"", "GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode:", "the method represented by the delegate. 
\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result:", "def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\"", "def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\"", "GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\"", "object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\"", "the current System.Object. 
\"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass", "IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns", "Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self:", "'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts',", "def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" pass def RemoveImpl(self,*args):", "\"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message):", "\"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\"", "\"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def", "NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object", "GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) 
GetCount(self: Inventory,key:", "KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs]", "pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines):", "-> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\"", "GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset)", "\"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\"", "\"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity):", "CacheKey,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel)", "def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\"", "value of false is usually appropriate. 
true to copy the current System.MarshalByRefObject object's", "\"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def", "(bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def", "-> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem", "GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock])", "\"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type:", "in remoting scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been", "shallow copy of the current System.Object. \"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject:", "-> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\"", "its original invocation list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str", "Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) ->", "ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self:", "\"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y):", "\"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General)", "CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\"", "\"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self:", "General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\"", "\"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId:", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" 
RemotingTcpChannelName=property(lambda self:", "pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task):", "AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key:", "Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count)", "\"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class", "PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines)", "Type,key: str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type)", "str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) ->", "DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User]", "Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def 
PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode:", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inbound() instance=ZZZ()", "GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\"", "DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) ->", "the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources.", "\"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def", "object is removed from a System.Data.DataTable. relation: The System.Data.DataRelation being removed. 
\"\"\" pass", "\"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass", "\"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self:", "\"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General)", "@staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class PyLogger(object): # no", "Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) ->", "(int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def", "General,hashCode: int) -> str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str", "pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self:", "pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def", "ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType:", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs)", "NumberGeneration,args: AddUsedNumberArgs) \"\"\" 
pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\"", "def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self:", "GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly:", "GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self:", "pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId):", "-> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings)", "AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes)", "pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg):", "-> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\"", "def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: 
Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId:", "-> object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\"", "GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget)", "LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str)", "\"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self:", "-> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\"", "pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str", "self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value \"\"\"", "pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\"", "\"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: 
General) -> bool \"\"\" pass def KillAppDomain(self,*__args):", "def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\"", "\"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass", "General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value:", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General)", "def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def", "PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions)", "\"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\" pass def", "-> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\"", "def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self:", "ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> 
DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self:", "def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\"", "PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo):", "object Creates a shallow copy of the current System.Object. Returns: A shallow copy", "\"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\"", "pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId):", "type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self:", "GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) ->", "(int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: 
Inbound) -> object \"\"\" pass def", "Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo:", "Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines:", "SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\"", "-> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass", "ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\"", "MessageBodyDecodeAs) -> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable])", "\"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\"", "DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass", "of the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod def", "Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] 
\"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) ->", "CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self:", "PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId):", "Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class", "Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey:", "\"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass def", "UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\"", "pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp):", "CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) ->", "GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self:", "(bool,Warehouse) \"\"\" pass def 
GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass", "bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass", "the payload. DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet.", "def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\"", "Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers):", "pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args:", "\"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self):", "IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg:", "def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def", "def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def 
CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers:", "DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from the binary or XML stream.", "int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) ->", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args:", "IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self:", "None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v:", "GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self:", "System.Data.DataRelation being removed. \"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when", "DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool", "General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) ->", "-> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\"", "GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\"", "OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args:", "\"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory)", "object. 
Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its", "-> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass", "pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def", "(bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass", "Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object", "does not require arguments. Returns: The object returned by the method represented by", "SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self:", "\"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id:", "\"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self:", "the property that is about to change. 
\"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self:", "the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem):", "pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self):", "GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines)", "assigned a new identity when it is marshaled across a remoting boundary. A", "\"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def", "DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs)", "pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self:", "pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode):", "Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str)", "pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: 
PrintLineBase) -> bool \"\"\" pass def", "General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase)", "-> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\"", "pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self):", "def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\"", "\"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers:", "IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the format of the", "User) -> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass", "Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self:", "Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber:", "def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\"", 
"(int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass", "@staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer:", "def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self:", "class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\"", "GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self:", "GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from the binary", "ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self:", "def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\"", "def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\"", "CacheKey) \"\"\" pass def 
SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass", "def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\"", "\"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None)", "General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\"", "info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. \"\"\" pass @staticmethod def GetTypedDataSetSchema(xs):", "GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet:", "AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self:", "ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "DateTime,ledgerCode: str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines:", "List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def", "FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def 
GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self:", "pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments):", "-> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\"", "DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey]", "def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args):", "GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\"", "DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines])", "being removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a", "CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType)", "\"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def", "UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self:", "DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass", "'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo',", "def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v:", "General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) ->", "object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass", "General,args: GetItemLocationsArgs) -> 
(bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) ->", "def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg:", "copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject", "bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) ->", "'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug',", "DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass", "\"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) ->", "\"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def", "pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines):", "stream. info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. 
\"\"\" pass @staticmethod def", "\"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\"", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob)", "__new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "-> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]]", "def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\"", "TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat',", "BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[", "A shallow copy of the current System.Object. 
\"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self:", "\"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def", "PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate)", "def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self:", "Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) ->", "by the method represented by the delegate. 
\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self:", "GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str)", "\"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass", "\"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\"", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) ->", "\"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\"", "-> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\"", "\"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None)", "-> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\"", "-> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) ->", "def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self:", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str)", "OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\"", "UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str)", "\"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders):", "GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self:", "PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\"", "def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds):", "\"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> 
DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg:", "current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to", "def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode:", "ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200", "\"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\"", "General) -> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) ->", "def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds:", "GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) ->", "Array[object]) -> object Dynamically invokes (late-bound) the method represented by the current delegate.", "def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter)", "pass def 
ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool", "(int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def", "a shallow copy of the current System.Object. Returns: A shallow copy of the", "CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) ->", "instance of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip'", "-> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def", "def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\"", "UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self:", "GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: 
Outbound,args: GetCustomersPendingArgs) ->", "DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup]", "HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) ->", "DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition])", "-> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates", "def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\"", "\"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args):", "class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\"", "TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\"", "DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass", "\"\"\" 
GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str)", "the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DoGetAllInstances(self,*args):", "an instance of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid]", "pass def __repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\" pass UserName=property(lambda self: object(),lambda", "GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self:", "def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer])", "def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging)", "IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass", "\"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\"", "GetUserByUserName(self: 
General,username: str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str)", "str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an", "def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\"", "-> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\"", "GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self:", "def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items):", "instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args):", "self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda", "\"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines):", "\"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and", "def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def 
EnsureLicenseExists(self): \"\"\"", "List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass", "GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\"", "pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds):", "General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users)", "None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda self,v:", "Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\" pass", "__all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general:", "\"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject:", "pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\"", "Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: 
Inbound,purchaseOrders:", "CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self:", "pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint):", "AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) ->", "pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg):", "see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general:", "DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition]", "GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders)", "Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key:", "\"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" 
StopMarshalledObjectFactories(self: General) \"\"\"", "SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set:", "CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self:", "GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo',", "UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData:", "\"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the format of the serialized", "def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\"", "FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes]", "-> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet)", "str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags)", "pass def LoadSettings(self,*__args): \"\"\" 
LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self):", "GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self:", "\"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey)", "\"\"\"hardcoded/returns an instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self):", "-> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) ->", "GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks)", "GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self:", "-> Delegate Combines this System.Delegate with the specified System.Delegate to form a new", "(bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass", "General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) ->", "HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def 
GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]]", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args:", "GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) ->", "DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName:", "def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\"", "DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str)", "ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass", "\"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass", "Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) 
-> str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value:", "Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs)", "\"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message:", "\"\"\" pass class PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the", "-> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId:", "return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self:", "-> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\"", "of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def", "pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\"", "class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\"", "IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self:", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for", "instance of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\"", "CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs)", "\"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def", "-> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\"", "\"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType:", "CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum)", "DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass", "EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass", "str,warehouseLocationCode: str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str)", "instance of the class\"\"\" return DocumentQueue() instance=ZZZ() 
\"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter:", "str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass def", "pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self:", "class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\"", "pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self:", "-> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\"", "def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject:", "\"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders:", "def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\"", "the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of 
the class\"\"\" def IsRegistered(self,type=None):", "-> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def", "None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v:", "str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType:", "(int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass", "pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations):", "GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self:", "def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self:", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\"", "A shallow copy of the current System.Object. 
\"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self:", "\"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def", "None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr)", "'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def", "-> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult", "Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn',", "the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool \"\"\"", "'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet()", "-> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\"", "DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self:", "def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: 
str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location):", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) ->", "OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs]", "\"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def", "bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool", "DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self:", "\"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\"", "Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\"", "x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\"", "pass def InitializeLifetimeService(self): \"\"\" 
InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\"", "\"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def", "LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def", "GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self:", "class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "-> MethodInfo Returns a static method represented by the current System.MulticastDelegate. 
Returns: A", "GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) ->", "CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def", "def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper):", "str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo:", "Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) ->", "\"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\"", "instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def", "pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers):", "def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self:", "Messaging() 
instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds:", "CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode:", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "\"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) ->", "self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\"", "CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) ->", "value is found in the invocation list for this instance,then a new System.Delegate", "'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def", "DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self:", "BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" 
CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id):", "GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass", "GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\"", "GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines)", "-> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\"", "def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\"", "\"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self):", "DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass", "new root of the System.MulticastDelegate invocation list. 
\"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self:", "OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass", "pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the", "\"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg:", "SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked", "None,lambda self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v:", "\"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int)", "pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass", "def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self:", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): 
\"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\"", "pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName):", "Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args):", "pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def GetMacAddress(self):", "pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self:", "\"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def", "\"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users):", "self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda", "def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\"", "def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\" GetScanners(self:", "\"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method represented by the current", "General,hint: str) -> Array[str] \"\"\" pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) ->", "DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def 
GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance", "pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order):", "CacheKey,receiveLineId: str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int)", "def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent)", "Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes)", "NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter() instance=ZZZ()", "\"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\"", "def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self:", "DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines]", "pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) 
StockManager=property(lambda self: object(),lambda self,v: None,lambda", "(int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def", "(bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass", "\"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None)", "StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name:", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound)", "General,key: CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) ->", "\"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg:", "-> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass", "bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass", "-> int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg):", "-> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" 
return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance", "pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode):", "self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\" class Constants(object): #", "Options=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\"", "\"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\"", "\"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations):", "pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items):", "for signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer)", "-> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode", "for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def 
GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams)", "System.Object. \"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args):", "AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) ->", "SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self:", "\"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject:", "DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging:", "\"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args:", "PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self:", "PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def 
PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\"", "def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\"", "OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that", "def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message):", "GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str)", "\"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject:", "str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom:", "str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) ->", "\"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda 
self,v: None,lambda", "General,hashCode: int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) ->", "GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot()", "\"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem:", "\"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection", "(int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass", "str) -> str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self:", "str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages)", "\"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def", "General) -> (int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags)", "class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "System.Object. 
\"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) ->", "General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str)", "General,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs)", "def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\"", "\"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args:", "\"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass def", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode:", "] class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum]", "pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult", "\"\"\" GetPurchaseOrdersAll(self: Inbound) -> 
(int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs)", "a System.Data.DataSet. table: The System.Data.DataTable being removed. \"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self:", "def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self:", "def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args:", "GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers)", "-> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines)", "-> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass", "-> str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type:", "object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a", "GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions:", "\"\"\" pass def StartBosInboundListener(self): \"\"\" 
StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite):", "BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass", "GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey)", "class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\"", "pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def", "pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\"", "for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type)", "\"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args):", "\"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning)", "NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) ->", "GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion 
\"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning)", "-> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\"", "def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\"", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr)", "class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\"", "type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass", "def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId):", "PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup:", "GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda", "UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def 
UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key:", "GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\"", "\"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) ->", "return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self:", "@staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\"", "(bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\"", "SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self:", "CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs]", "during deserialization of the System.Data.DataSet. 
Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information", "OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) ->", "RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc # no", "self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers):", "\"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn:", "(int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass", "GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) ->", "def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\"", "-> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass", "General) -> str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings 
\"\"\"", "-> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\"", "Deserializes the table data from the binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo", "'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\"", "def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\"", "(int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def", "DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) ->", "DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass", "pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\" pass def", "PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom):", "GetCurrentAppVersion(self: 
OfflineScanning) -> LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners", "ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass", "__new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex:", "\"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is removed from a", "'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\"", "-> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\"", "PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs])", "Type) -> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) ->", "\"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass def", "DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def", "\"\"\" ValidateColliReferences(self: 
General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode:", "DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. reader: The System.Xml.XmlReader", "pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device):", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "\"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self:", "method represented by the current delegate. args: An array of objects that are", "ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration]", "str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__", "Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key:", "class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\"", "\"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass", "GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines)", "of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) ->", "General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) ->", "\"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def", "def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\"", "def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass def 
SendMessage(self,endPoint,message): \"\"\" SendMessage(self:", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs)", "static method represented by the current System.MulticastDelegate. \"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self:", "Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons)", "Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) ->", "def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass def GetScripts(self,arg,scripts): \"\"\"", "str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object \"\"\" pass", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance", "GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) ->", "\"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration() instance=ZZZ()", "-> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages", "\"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def", 
"GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk:", "\"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self:", "General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\"", "GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter)", "System.MulticastDelegate invocation list. \"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object", "\"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId:", "None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v:", "IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self:", "GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams:", "pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): 
\"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass", "AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass", "(bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass", "GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self:", "instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId):", "Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams)", "Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\"", "-> bool \"\"\" pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\"", "MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: 
IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v:", "Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def", "see x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def", "PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self:", "TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\"", "ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition]", "GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId):", "AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass", "the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files 
(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0", "DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass", "-> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\"", "DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs]", "instance of the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass", "def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def", "is equal to the specified delegate. 
value: The delegate to search for in", "Zone,user: User) -> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\"", "\"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None)", "class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddDirectOrder(self,args): \"\"\"", "Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) ->", "-> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\"", "\"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def", "Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines:", "\"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\"", "GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) ->", "bool \"\"\" pass def 
Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str \"\"\" pass", "str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom:", "self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self:", "instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool", "BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus", "str) -> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool)", "bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def", "Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses)", "\"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg:", "GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" 
GetWarehousesAll(self: General)", "\"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls:", "-> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass", "pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject):", "\"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy:", "None,lambda self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v:", "ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def", "\"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with the specified System.Delegate", "pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass", "def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\"", "signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): 
\"\"\"hardcoded/mock instance", "\"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey:", "GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\"", "pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts):", "class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\"", "GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self:", "Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass", "object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\"", "pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveTranslations(self,translations): \"\"\"", "\"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass", "General,active: bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" 
GetZonesActiveOfCurrentUser(self: General) -> (int,Zones)", "BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass", "instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\"", "List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def", "def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\"", "PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]]", "def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key:", "def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs)", "Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: 
MessageBodyDecodeAs,messageBody: str) \"\"\"", "from the binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming", "(bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass", "SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings:", "def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\"", "\"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None)", "-> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\"", "CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda", "are the arguments to pass to the method represented by the current delegate.-or-", "\"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass", "def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self:", "\"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def", "\"\"\" pass def GetFileTypes(self): \"\"\" 
GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes):", "-> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\"", "pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req):", "Guid) -> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) ->", "def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self:", "(int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass", "General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\"", "pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass", "TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate)", "ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists:", "GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> 
(bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self:", "signature \"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext)", "signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\"", "signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings)", "AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass", "GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\"", "-> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings)", "\"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass", "Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode:", "pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\"", "pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: 
NotificationSummary,executionType: str) -> UiForm \"\"\" pass def InitializeLifetimeService(self):", "\"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\"", "\"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) ->", "-> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\"", "str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\"", "x; see x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging:", "GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) ->", "GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int)", "General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label:", "StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage)", "\"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" 
GetScriptIntellisenseOptions(self: General,hint:", "def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\"", "the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddRemotePublisher(self,req):", "Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "WrapException(ex: Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def", "GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) ->", "bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance", "the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists):", "\"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone)", "def 
CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg:", "-> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\"", "(bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args):", "DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel]", "General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def", "(int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def", "pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self:", "SendKey(self: General,endPoint: str,key: str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str)", "pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def", "SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str)", "GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: 
General) ->", "def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self:", "def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the specified System.Data.DataSet", "-> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass", "-> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings)", "no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns", "RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) ->", "of the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass", "-> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object", "str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation)", "CallerContext) -> str \"\"\" class Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance", "\"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str", "AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def 
AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass", "pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey):", "\"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "\"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) ->", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general):", "pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\"", "GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self:", "def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self:", "Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams)", "\"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def", "\"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "an instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) 
-> (int,ReportItems) \"\"\"", "\"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) ->", "NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) ->", "GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs)", "handlers that are attached to this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda", "CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool", "DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors)", "\"\"\" GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions)", "OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult)", "CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() ->", "GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: 
GetCustomersWithPendingPackagesArgs)", "IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning()", "-> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) ->", "Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\"", "pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self:", "ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\"", "\"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def", "DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs])", "GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter:", "\"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> 
(int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound)", "'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self):", "Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody):", "(int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def", "pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\"", "IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator)", "str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode:", "root of the System.MulticastDelegate invocation list. 
\"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args:", "-> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\"", "CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool)", "SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\"", "-> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\" pass", "\"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def", "def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self:", "-> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\"", "CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line:", "\"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def", "General,script: 
str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) ->", "DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\"", "GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self:", "ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self:", "def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\"", "__exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "\"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode:", "Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams)", "GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) ->", "str,countGroupId: int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode:", "list of event handlers that are attached to this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self:", "GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) ->", "System.Object. 
\"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds):", "WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts)", "Info(msg: str) \"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod", "-> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\"", "GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) ->", "def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\"", "signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\"", "SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines)", "General,zone: Zone,user: User) -> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str)", "serialized in its binary format,false otherwise. 
\"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent:", "def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self:", "current System.Object. \"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def", "DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode):", "AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs)", "Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) ->", "-> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\"", "Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) ->", "represented by the current delegate does not require arguments. 
Returns: The object returned", "def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\"", "General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers", "pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg):", "ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass", "str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\"", "\"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) ->", "str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass", "PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self:", "pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId):", "Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\"", 
"(bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass", "remoting scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. Returns: An", "pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject):", "\"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch:", "GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type])", "PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args):", "\"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure):", "-> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\"", "(bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def", "\"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) 
\"\"\" pass def", "object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass", "def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\"", "SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization", "(int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\"", "def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode:", "-> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\"", "pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId):", "The System.Xml.XmlReader instance that is passed during deserialization of the System.Data.DataSet. 
Returns: An", "-> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\"", "RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) ->", "\"\"\" class Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "-> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass", "StreamingContext) Deserializes the table data from the binary or XML stream. info: The", "that contains the event data. \"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation)", "int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) ->", "SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName:", "str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) ->", "the event data. 
\"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when", "GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) ->", "Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key:", "pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\"", "\"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) ->", "int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int", "GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self:", "pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool \"\"\" pass def", "MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\"", "\"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def", "CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self:", "def GetBarcodeSettingsAll(self,types): \"\"\" 
GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self:", "\"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self:", "CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\"", "Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" pass def RemoveImpl(self,*args): \"\"\"", "Type) -> bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object)", "\"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass def", "str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__", "to pass to the method represented by the current delegate.-or- null,if the method", "-> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\"", "Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup)", "(int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass", "None,lambda self: None) \"\"\"Gets the list of event handlers that are attached to", "PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun):", "\"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\" pass def", "\"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\"", "\"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users):", "def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def 
AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self:", "object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging)", "pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass", "License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda", "-> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) ->", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "current System.Object. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\"", "DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel])", "\"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers):", "\"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\"", "def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\"", "DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet)", "def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self:", "bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass", "str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\"", "\"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\" pass def", "\"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> 
DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter:", "\"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId):", "bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool", "def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id:", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order):", "invocation list. Returns: If value is found in the invocation list for this", "AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def", "the current delegate does not require arguments. 
Returns: The object returned by the", "SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\"", "Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper])", "\"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\"", "General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode:", "\"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\"", "General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) ->", "def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self:", "a System.Data.DataTable. relation: The System.Data.DataRelation being removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self:", "\"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self:", "GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General)", "-> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\"", "\"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) ->", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object \"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General)", "General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams)", "object Dynamically invokes (late-bound) the method represented by the current delegate. 
args: An", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method:", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value", "def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\"", "-> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\"", "GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self:", "def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\"", "-> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) ->", "pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\"", "SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value:", "int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone)", "self: 
object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda", "GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) ->", "CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int)", "def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda", "General,macAddress: str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) ->", "-> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\"", "NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) ->", "def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\"", "pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\" pass def GetUsersActive(self,users):", "\"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass", "General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def 
TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\"", "\"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass", "\"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "\"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int)", "General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode:", "class BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "General,id: int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets)", "\"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\"", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance", "Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no", "# no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ()", "StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Mailer(object): \"\"\" Mailer() \"\"\" def", "the DataSet. 
info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. Returns: true if", "pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked):", "\"\"\"hardcoded/mock instance of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "(int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass", "\"\"\"hardcoded/returns an instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\"", "OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) ->", "the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass", "CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs)", "pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass", "AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return AppHost()", 
"\"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def", "IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self):", "pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\"", "ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script:", "\"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\"", "RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user:", "System.Object. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) ->", "of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\"", "IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self:", "\"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\"", "GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary)", "def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self:", "ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber:", "-> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\"", "CacheKey,itemId: str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) ->", "-> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\"", "-> DataFlowObject[Zone] \"\"\" pass 
def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\"", "require arguments. Returns: The object returned by the method represented by the delegate.", "pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass def GetScripts(self,arg,scripts):", "str \"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass", "-> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\"", "AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self:", "ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int", "(int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass", "\"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda", "General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "by the current System.MulticastDelegate. 
Returns: A static method represented by the current System.MulticastDelegate.", "\"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId:", "\"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def", "def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self:", "def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\"", "DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass", "\"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId:", "pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\"", "otherwise,this instance with its original invocation list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class IExtendedServiceLocator: # no", "-> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs)", "DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) ->", "\"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\"", "the method represented by the current delegate does not require arguments. 
Returns: The", "-> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\"", "-> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass", "-> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings)", "\"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId:", "DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass", "an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "client calls to be routed to the remote server object. 
Returns: A shallow", "def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\"", "classes class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\"", "-> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\"", "Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder)", "\"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody:", "str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\" pass", "pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass def", "Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath):", "None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self:", "GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> 
(int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self:", "Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\"", "UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs)", "RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode:", "DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg):", "-> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) ->", "RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self:", "DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs]", "(int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass", "PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> 
DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs])", "-> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\"", "def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages):", "pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A", "str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException)", "GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) ->", "bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. 
cloneIdentity:", "-> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob]", "PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\"", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId):", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) ->", "GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self:", "pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations):", "\"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\"", "pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups):", "Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: 
str,warehouseCodeTo:", "NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def", "General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject:", "MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int)", "@staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod def", "General) -> object \"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool \"\"\"", "DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule]", "ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int", "class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines]", "SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews)", "specified System.Data.DataSet property is about to change. 
name: The name of the property", "without value in its invocation list; otherwise,this instance with its original invocation list.", "\"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\" pass", "return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging()", "(int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\"", "AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\"", "CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self:", "\"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def", "IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg):", "\"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def", "\"\"\"hardcoded/mock instance of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "\"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg:", "\"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass", "GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self:", "def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\"", "payload. 
\"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources", "__enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type:", "-> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\"", "-> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\"", "GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self:", "pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode):", "\"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs)", "NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an", "def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method represented by", "CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self:", "UnityServiceLocator,serviceType: 
Type,key: str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType:", "pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def", "pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self:", "General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) ->", "\"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode:", "InitializeLifetimeService(self: Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory)", "current System.Object. \"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance", "information has been omitted from the payload. 
\"\"\" pass def Dispose(self): \"\"\" Dispose(self:", "str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom:", "def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk):", "General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool", "General,name: str) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) ->", "\"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General)", "DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) ->", "-> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None)", "DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript])", "Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: 
DataFlowObject[ReceiveItemIdRangeArgs]) ->", "\"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs)", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda", "current System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\"", "__reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets", "def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\"", "to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject", "pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors):", "\"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg:", "int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications)", "GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self:", "\"\"\" PurchaseOrders_GetHistoryLinesDataTable=None 
PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None", "-> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass", "IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode:", "General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) ->", "GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs)", "marshaled across a remoting boundary. A value of false is usually appropriate. true", "pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\"", "copy of the current System.Object. 
\"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs])", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns", "class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\"", "MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey:", "DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\"", "\"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers:", "(bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass", "System.Delegate without value in its invocation list; otherwise,this instance with its original invocation", "\"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs)", "UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: 
str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass", "GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str)", "\"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def", "\"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self:", "GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self:", "-> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\"", "AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs)", "\"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) ->", "pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: 
str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult", "of the property that is about to change. \"\"\" pass def ReadXmlSerializable(self,*args): \"\"\"", "GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) ->", "self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda", "-> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum)", "ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage)", "\"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs)", "Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args:", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass def MemberwiseClone(self,*args):", "\"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) ->", 
"Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type:", "Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages)", "\"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def", "-> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\"", "InitializeLifetimeService(self: General) -> object \"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool", "\"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def", "\"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def", "identity when it is marshaled across a remoting boundary. 
A value of false", "pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\"", "-> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key):", "return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self:", "\"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int)", "\"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject:", "\"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn):", "Returns: A shallow copy of the current System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\"", "def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\"", "\"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str)", "pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items):", "str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self:", "-> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self:", "\"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def AttachClient(self,endPoint):", "DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def 
DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs)", "UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\"", "GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self:", "General,id: int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) ->", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args):", "this delegate. Returns: A delegate that is the new root of the System.MulticastDelegate", "System.Data.DataSet. 
Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from", "see x.__class__.__doc__ for signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral)", "pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass def", "self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda", "RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\"", "GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams:", "(bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem)", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self:", "\"\"\" pass def 
GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType):", "pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode):", "General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs) ->", "\"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\"", "method represented by the current delegate does not require arguments. Returns: The object", "pass def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\"", "GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self:", "General,id: int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags)", "pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug',", "pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId:", "int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode:", "CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" 
CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\" pass", "-> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\"", "General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) ->", "def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with the", "(int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass", "pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is", "CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self:", "GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self:", "-> str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass", "str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode:", "pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" 
GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass def", "\"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow", "pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\"", "AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self:", "return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self:", "def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "-> bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass", "\"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General)", "bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter:", "pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self:", "ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def 
GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) ->", "an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts'", "\"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def", "\"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset):", "DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self:", "\"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self):", "def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders):", "pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\"", "def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" 
pass def", "-> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\"", "-> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object", "-> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\"", "\"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass class IExtendedServiceLocator: #", "GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args:", "UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) ->", "GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self:", "\"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self):", "FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles):", "str,packagesKey: CacheKey) -> 
(int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) ->", "AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self):", "\"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General)", "GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script:", "-> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\"", "GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors)", "is usually appropriate. 
true to copy the current System.MarshalByRefObject object's identity to its", "\"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def", "pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self:", "str) \"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\"", "-> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\"", "PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) ->", "\"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass", "is removed from a System.Data.DataTable. relation: The System.Data.DataRelation being removed. 
\"\"\" pass def", "DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\"", "be assigned a new identity when it is marshaled across a remoting boundary.", "-> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\"", "Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) ->", "-> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\"", "\"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str)", "when it is marshaled across a remoting boundary. A value of false is", "\"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def", "binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. 
\"\"\"", "ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs]", "ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool", "\"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass def", "GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self:", "\"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject:", "General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity)", "\"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) ->", "def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\"", "\"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey:", "invocation list; otherwise,this instance with its original invocation list. 
\"\"\" pass def __init__(self,*args):", "GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) ->", "\"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace',", "The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. \"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\"", "DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DataSet()", "AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone:", "str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line:", "def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self:", "-> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings)", "OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self:", "str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\"", 
"DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass", "shallow copy of the current System.Object. \"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile:", "List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass", "GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object", "pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence):", "\"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition):", "GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\"", "ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper()", "\"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target:", "GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) ->", "bool,name: str,fromDir: str,toDir: str) -> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General)", "pass def 
DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg):", "\"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args:", "pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args):", "pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args):", "def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\"", "LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\"", "Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) ->", "of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\"", "GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\"", "self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" 
SchemaSerializationMode=property(lambda self: object(),lambda", "\"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def", "pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid):", "-> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass", "\"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self:", "-> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\"", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\"", "FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs])", "HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing)", "GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" 
InitializeLifetimeService(self:", "str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def", "used by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. disposing: true to", "-> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\"", "GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter)", "Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders)", "(bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass", "str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate)", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings:", "General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\"", "-> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\"", "str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) 
\"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) ->", "\"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self:", "the current System.Object. \"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass", "\"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def", "(int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\"", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def SetCurrentAppVersion(self,args):", "DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self:", "pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\"", "CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate", "__new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda", "pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches):", "-> object Dynamically invokes (late-bound) the method represented by the current delegate. 
args:", "List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def", "reader: The System.Xml.XmlReader instance that is passed during deserialization of the System.Data.DataSet. Returns:", "\"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def", "\"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) ->", "DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg):", "pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self:", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "-> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool", "\"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) ->", "MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. 
cloneIdentity: false to", "def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self:", "GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) ->", "def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool", "(int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass", "GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion", "SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self): \"\"\"", "of objects that are the arguments to pass to the method represented by", "def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) ->", "-> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\"", "GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation]", "instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\" 
AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def", "System.MulticastDelegate. Returns: A static method represented by the current System.MulticastDelegate. \"\"\" pass def", "int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask)", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\"", "(int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass", "GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\"", "def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message): \"\"\"", "SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\"", "pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink():", "CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg:", "PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self:", "# from 
Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc # no important", "bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool", "-> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\"", "-> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems)", "def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\"", "General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask)", "pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses):", "AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass", "DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\"", "\"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: 
CacheKey,itemCode: str) -> bool \"\"\" pass", "pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs):", "GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) ->", "PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType):", "str,possibleAnswers: int) -> Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str)", "pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\"", "pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items):", "def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate):", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher:", "bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass 
def __init__(self,*args): \"\"\"", "General,lock: ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) ->", "AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self:", "pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass", "\"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args:", "-> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str", "\"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item):", "-> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) ->", "-> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\"", "str) -> Array[str] \"\"\" pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts)", "GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self:", "\"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" 
pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str", "def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self:", "str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) ->", "def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath:", "DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "\"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg:", "def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self:", "Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary", "\"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def", "self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda", "Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams)", "self: None) \"\"\"Get: 
PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda", "DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink]", "GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) ->", "-> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\"", "def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def", "General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels)", "pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary)", "pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self):", "General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int)", "@staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg:", "RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" 
pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) ->", "SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "event data. \"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a", "DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\"", "def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\"", "None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v:", "\"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod", "ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode):", "DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages)", "GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int)", "-> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self): 
\"\"\"hardcoded/mock instance", "GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self:", "DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink]", "pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args):", "WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) ->", "(BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass", "ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\"", "\"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self:", "(int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def", "Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: 
DataFlowObject[CacheKey]) ->", "\"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\"", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\"", "\"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders):", "Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings:", "ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml'", "CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self:", "BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: 
OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def", "pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self:", "\"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass def GetSettings(self):", "to be assigned a new identity when it is marshaled across a remoting", "ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns", "(bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine)", "A shallow copy of the current System.Object. 
\"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self:", "GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging:", "GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self:", "NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object", "An array of objects that are the arguments to pass to the method", "CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with the specified System.Delegate to", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general:", "def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\"", "def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\"", "GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args:", "'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "\"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def 
GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId:", "(int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass", "General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\"", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general):", "-> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\"", "Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass", "class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddScanner(self,args): \"\"\"", "Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode:", "CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification])", "-> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId:", "UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass def 
ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self:", "GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self:", "\"\"\" ExecuteScript(self: General,script: str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id:", "GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) ->", "def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\"", "GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self:", "DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass def", "UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject:", "General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\"", "-> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\"", "-> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\"", "of the class\"\"\" return 
NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def", "value: The delegate to search for in the invocation list. Returns: If value", "\"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg:", "\"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def", "\"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def", "IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "-> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass", "enumeration indicating whether schema information has been omitted from the payload. 
DetermineSchemaSerializationMode(self: DataSet,reader:", "Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color]", "GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) ->", "def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\"", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "\"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self:", "pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script:", "BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\"", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance", "pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs)", "Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): 
\"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs)", "def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\"", "@staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args:", "the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddScanner(self,args):", "-> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\"", "-> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\"", "bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass", "GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs)", "def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def", "pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "def GetColliPresetsAll(self,colliPresets): 
\"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self:", "DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) ->", "int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\"", "\"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def", "\"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def", "\"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\"", "\"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str)", "def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds:", "GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) ->", "DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass def", "def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> 
(bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\"", "def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\"", "pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\"", "pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass", "\"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\"", "DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) ->", "\"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def StartRemotePublishingInboundListener(self):", "'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter',", "Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) ->", "str) -> CacheKey \"\"\" pass def 
PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType])", "\"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args):", "shallow copy of the current System.Object. \"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args:", "def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass def", "\"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveTranslations(self,translations):", "None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def", "def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds:", "List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def", "GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\"", "PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def 
GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary]", "x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject):", "\"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def", "delegate does not require arguments. Returns: The object returned by the method represented", "\"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode:", "-> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass", "\"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def", "copy of the current System.MarshalByRefObject object. 
MemberwiseClone(self: object) -> object Creates a shallow", "pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey", "def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass def", "PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) ->", "DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification]", "-> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\"", "-> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages)", "WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda", "Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" 
PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) ->", "bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool", "UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self:", "str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) ->", "DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey])", "pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\"", "pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class", "str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def", "that is equal to the specified delegate. 
value: The delegate to search for", "def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone:", "def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\"", "IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers):", "GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\"", "def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\"", "current System.Object. 
\"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def", "\"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x:", "\"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def", "-> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\"", "pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self:", "General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns", "DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass", "pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\"", "\"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def", "def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\"", "def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: 
DirectOrderCrudArgs)", "General,itemCode: str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str)", "UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "\"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass", "-> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\"", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\"", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general:", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an", "self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda", "def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self:", "pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self):", "GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg:", "def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\"", "DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass", "General,endPoint: str,key: str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\"", "-> DataFlowObject[BackgroundAgent] 
\"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\"", "\"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) ->", "Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams:", "Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns", "Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) ->", "CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript])", "been omitted from the payload. \"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool)", "def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked):", "-> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args):", "\"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass def", "Returns: A static method represented by the current System.MulticastDelegate. 
\"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound):", "GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) ->", "def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str)", "(bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass", "\"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def", "(bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass", "GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) ->", "\"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\"", "pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\"", "GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey)", "def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def 
InitializeLifetimeService(self): \"\"\"", "\"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid)", "delegate to search for in the invocation list. Returns: If value is found", "the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\"", "GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General)", "SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) ->", "-> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine)", "pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type):", "false is usually appropriate. 
true to copy the current System.MarshalByRefObject object's identity to", "CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo:", "GetPrintersTable(self: General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) ->", "attached to this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass def", "instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\"", "RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) ->", "None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self:", "of the current System.Object. 
\"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) ->", "GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int)", "self: None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None", "pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo):", "Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) ->", "\"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def", "def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode):", "is marshaled across a remoting boundary. 
A value of false is usually appropriate.", "GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self:", "CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started):", "\"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg:", "\"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue)", "pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\"", "None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\" class Constants(object): # no", "def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self:", "with its original invocation list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int)", "Returns: If value is found in the invocation list for this instance,then a", "General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) ->", "\"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId:", "LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate", "\"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General)", "\"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy:", "str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass", "str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs])", "Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches)", "self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\" 
MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda", "for signature \"\"\" pass class IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance", "def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\"", "GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\"", "OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool", "pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\"", "Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment])", "\"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext() instance=ZZZ()", "PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs)", "is the new root of the System.MulticastDelegate invocation list. 
\"\"\" pass def DynamicInvokeImpl(self,*args):", "__new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary()", "def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def", "RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity):", "None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self:", "def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\"", "List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass", "\"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def", "instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' 
MaxAllowedTimeDifference=None", "DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId:", "StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem)", "GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General) ->", "IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,general):", "\"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) ->", "__new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue:", "pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting):", "ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject):", "General) -> str \"\"\" pass 
def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule)", "AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy:", "InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int)", "General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels)", "List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self:", "DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass", "UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self:", "int) -> Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\"", "pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def", "GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" 
InitializeLifetimeService(self:", "class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self:", "delegate.-or- null,if the method represented by the current delegate does not require arguments.", "\"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def", "str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\"", "General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v:", "FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self:", "str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher", "is about to change. 
name: The name of the property that is about", "MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the", "(bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass", "GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self:", "General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\"", "-> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\"", "GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte]", "\"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass def", "MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\"", "GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments):", "General,itemCode: str) -> (bool,Item) \"\"\" pass def 
GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) ->", "an instance of the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\"", "\"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs])", "to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting", "Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception) ->", "CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script):", "\"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def", "object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "\"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def MemberwiseClone(self,*args):", "\"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def", "General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License)", "pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args):", "DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\"", "pass def 
GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder):", "str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject:", "def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self:", "def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines):", "def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject):", "GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs)", "args: An array of objects that are the arguments to pass to the", "\"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General)", "\"\"\" class ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "\"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> 
str", "pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self:", "current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\"", "Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\"", "of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. Returns: true", "DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int]", "self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\"", "BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self:", "\"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels):", "\"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing)", "is removed from a 
System.Data.DataSet. table: The System.Data.DataTable being removed. \"\"\" pass def", "'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet):", "def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule:", "pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\"", "PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer:", "User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\"", "GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders)", "-> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\"", "-> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass", "\"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): 
\"\"\" InitializeLifetimeService(self: NumberGeneration)", "A shallow copy of the current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self:", "-> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass", "Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass", "Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass", "bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\"", "shallow copy of the current System.Object. 
\"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject:", "CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance", "HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams)", "-> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str", "ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str)", "object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass", "GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self:", "\"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def", "General,seconds: int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: 
General,tcpPortNumber: int,unsafe:", "ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet)", "(TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations)", "-> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\"", "DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass", "GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self:", "\"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId:", "\"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses):", "\"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str", "-> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object", "def 
GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\"", "object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ()", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\" class Constants(object):", "pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\"", "Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def", "# no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Constants() instance=ZZZ()", "def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\"", "(int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass", "pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors):", "self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda", "NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool", "class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an 
instance of the class\"\"\" def __init__(self,*args): \"\"\"", "pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self:", "class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self:", "NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) ->", "-> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\"", "\"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args:", "int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) ->", "StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count])", "def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool \"\"\" pass", "DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "GetErpName(self: General) -> str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str)", "-> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) ->", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container:", "GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId:", "-> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts)", "-> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass", "\"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass", "\"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\" pass def", "-> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\"", "\"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def 
GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args:", "\"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass", "ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult", "\"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass def", "General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object", "\"\"\" DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg:", "pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def", "PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg):", "GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str)", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def 
IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity):", "-> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) ->", "-> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\"", "pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args):", "GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args:", "pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId):", "\"\"\"hardcoded/returns an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback:", "-> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) ->", "\"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) ->", "PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" 
GetMacAddress(self: General) -> str \"\"\" pass def", "def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\"", "General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) ->", "\"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable", "for signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\"", "pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot():", "Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args:", "CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self:", "def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self:", "GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting:", "pass def 
GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders):", "-> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\" pass", "@staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args):", "\"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an element from the invocation list", "RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self:", "IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "Inventory,key: CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str)", "-> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\"", "signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass", "IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inventory() instance=ZZZ()", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls: 
type,info:", "If value is found in the invocation list for this instance,then a new", "DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass", "GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) ->", "\"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def", "GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str)", "\"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self):", "pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def", "DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass", "\"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass", "General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\"", "\"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings: # no doc def ZZZ(self):", "\"\"\" GetProfilingLogEntries(self: General,userKey: 
int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes):", "of the current System.Object. \"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) ->", "class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity):", "(int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass def", "\"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args:", "pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents):", "\"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch:", "GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str]", "(bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass", "DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass", "pass def 
SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\"", "an instance of the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder]", "\"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def", "pass to the method represented by the current delegate.-or- null,if the method represented", "\"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def", "AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self:", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists:", "LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self:", "TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) ->", "\"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None)", 
"GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self:", "StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions)", "def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self:", "RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey:", "\"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode:", "whether schema information has been omitted from the payload. 
DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) ->", "-> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications)", "def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\"", "DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self:", "General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns", "-> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings)", "\"\"\"hardcoded/returns an instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass", "System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios.", "\"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass def", "\"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "-> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass", "SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self): \"\"\"", "DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator]", "pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject):", "\"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) ->", "ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self:", "EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\"", "list; 
otherwise,this instance with its original invocation list. \"\"\" pass def __init__(self,*args): \"\"\"", "def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\"", "GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary)", "\"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey)", "pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args):", "General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\"", "GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode:", "\"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self:", "\"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def", "Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\"", "pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> 
(bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self):", "Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\"", "\"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\"", "int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange]", "Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info',", "A shallow copy of the current System.Object. \"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self:", "instance of the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "by the current delegate. 
args: An array of objects that are the arguments", "DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) ->", "-> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) ->", "\"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject:", "def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\"", "DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\"", "pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self):", "DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args:", "DataSet,name: str) Sends a notification that the specified System.Data.DataSet property is about to", "\"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass def", "of the current System.Object. 
\"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\"", "pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self):", "GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General)", "DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass", "copy of the current System.Object. \"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name:", "Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) ->", "Returns: A shallow copy of the current System.Object. \"\"\" pass def __init__(self,*args): \"\"\"", "SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\"", "-> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\"", "int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def", "pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate):", "the current System.Object. 
\"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs]", "the current System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass", "int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass", "DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool", "pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class", "Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs)", "DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders])", "-> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders)", "\"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def", "@staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg:", "\"\"\" 
GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound)", "System.Collections.Generic import * from ..__init__ import * # no functions # classes class", "-> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\"", "GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages:", "GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key:", "\"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name:", "GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General)", "Events=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the list of event handlers", "def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class PyLogger(object): # no doc", "\"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def", "def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: 
NotificationCenter,notificationFilter:", "def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts):", "bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable", "-> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location", "\"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def", "\"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def", "-> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\"", "\"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def", "PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" 
PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs:", "InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool", "\"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg:", "# by generator 1.145 # no doc # no important from System.Collections.Generic import", "pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device):", "of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod", "-> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions)", "Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) ->", "(int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass", "type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None)", "Occurs when a System.Data.DataRelation object is removed from a System.Data.DataTable. 
relation: The System.Data.DataRelation", "\"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def", "\"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\"", "GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId:", "def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists):", "\"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self:", "-> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\"", "\"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs)", "IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def", "DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass", "-> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\"", "been omitted from the payload. 
DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode", "Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey)", "PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\"", "def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\"", "-> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId:", "DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass", "DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. 
Returns: true if the", "PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs)", "-> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\"", "System.MulticastDelegate that is equal to the specified delegate. value: The delegate to search", "pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def", "Inbound,groupGuid: Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode:", "List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\"", "def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey):", "\"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate:", "CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> 
DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag])", "NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def", "-> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass", "\"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def", "(bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\"", "(int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass", "def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self:", "\"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter:", "GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args:", "def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\"", "def 
PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass", "GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General) -> str", "-> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\"", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def SaveConfiguration(self,model):", "PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs])", "ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def", "CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items:", "-> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\"", "System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be", "XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\"", "def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed from", "from the payload. 
DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a", "General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\"", "pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\"", "CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self:", "\"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def", "of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg): \"\"\"", "pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id):", "(bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\" pass", "DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts)", "GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self:", "\"\"\" pass def 
RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an element", "pass class BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn:", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor:", "def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self:", "General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str)", "pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers):", "\"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args):", "-> object Creates a shallow copy of the current System.Object. 
Returns: A shallow", "\"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self):", "\"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode:", "list of this System.MulticastDelegate that is equal to the specified delegate. value: The", "int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute]", "PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\"", "a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is", "(bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod", "NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm", "-> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\"", "str,quantity: Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems)", "CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups)", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None):", "\"\"\"Gets the list of event handlers that are attached to this component. 
\"\"\"", "\"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name:", "GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self:", "] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self:", "GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) ->", "-> str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass", "General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) ->", "AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def", "of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass", "pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses):", "(bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" 
GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass", "ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program", "System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass", "\"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def", "-> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass", "str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions)", "return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def Clone(self): \"\"\" Clone(self:", "def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): 
\"\"\" DeleteNotification(self: NotificationCenter,notificationId: int)", "def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self:", "def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\"", "pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod def", "\"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def", "def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\"", "\"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId:", "Returns: A delegate that is the new root of the System.MulticastDelegate invocation list.", "ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self:", "HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) ->", "instance of the class\"\"\" def AddDirectOrder(self,args): 
\"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\"", "pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def", "PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self:", "\"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for", "System.Object. \"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self):", "-> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass", "str,itemIds: List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications)", "def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self:", "Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment:", "\"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass def", "\"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: 
GetAppVersionFileSpecArgs) -> str \"\"\" pass def", "def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\"", "\"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args:", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self:", "-> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\"", "\"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter:", "GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\"", "GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) ->", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self:", "the class\"\"\" def 
DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\"", "ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self:", "x; see x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self: object) ->", "WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str)", "None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v:", "\"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass", "-> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def", "-> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\"", "-> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\"", "-> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> 
PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda", "def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\"", "GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing)", "def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\"", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args):", "\"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Mailer(object): \"\"\" Mailer()", "pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents):", "pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications):", "OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. 
pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the", "\"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck):", "pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch):", "pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\" pass def", "a System.Data.DataRelation object is removed from a System.Data.DataTable. relation: The System.Data.DataRelation being removed.", "GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg:", "Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass", "-> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\"", "def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\"", "-> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\"", 
"DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag]", "\"\"\" LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "\"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General)", "\"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General)", "-> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\"", "\"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None)", "\"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self:", "self,v: None,lambda self: None) \"\"\"Gets the list of event handlers that are attached", "def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def", "@staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda", "Returns a static method represented by the current 
System.MulticastDelegate. Returns: A static method", "\"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag:", "\"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment:", "-> str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\" class", "-> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass", "def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\"", "None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self:", "LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self:", "\"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self:", "GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int)", "PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' 
ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files", "\"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets):", "GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) ->", "InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General)", "pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass", "PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\"", "pass class IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): 
\"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo:", "type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda", "BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args:", "-> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass", "\"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def", "DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\"", "doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an", "(int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass", "\"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the", "pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def", "an instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher", "-> CacheKey \"\"\" pass def 
PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str)", "initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes", "an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\"", "GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self:", "General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool", "General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\"", "@staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def", "DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey]", "GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self:", "of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: 
Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult", "\"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound)", "self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def PrintPickBatchLabel(self,dfObject):", "-> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\"", "\"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg:", "\"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass def GetSessions(self,sessions):", "int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices:", "the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext()", "DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self:", "CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass", "RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) 
\"\"\"Get:", "\"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def", "\"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\"", "-> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General)", "CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\"", "GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str]", "GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) ->", "Returns: A shallow copy of the current System.Object. 
\"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\"", "OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\"", "InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self):", "def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self):", "BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds:", "ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\"", "GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) ->", "\"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self:", "str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass", "\"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: 
str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General)", "\"\"\" InitializeLifetimeService(self: General) -> object \"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) ->", "def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self:", "-> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem)", "\"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName:", "\"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self:", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "the payload. 
\"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged", "AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self):", "def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\"", "Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) ->", "GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self:", "indicating whether schema information has been omitted from the payload. 
\"\"\" pass def", "IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self:", "(int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass", "ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self:", "pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "\"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations):", "\"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass", "GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args:", "RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt'", "GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def 
GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum)", "\"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\"", "functions # classes class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda", "managed resources. disposing: true to release both managed and unmanaged resources; false to", "pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\"", "bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings):", "self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda", "-> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\"", "object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def", "\"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" 
StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\"", "\"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\"", "-> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\"", "\"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def", "\"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args:", "Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Outbound()", "List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def", "GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning)", "\"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\" pass", "null,if the method represented by the current delegate does not require arguments. 
Returns:", "\"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def", "str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom:", "def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg):", "bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs]", "def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the format of", "(int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass", "DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int])", "pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no", "GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self:", "System.Data.DataTable. relation: The System.Data.DataRelation being removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table:", "\"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self:", "pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\"", "DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass", "PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel)", "component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) ->", "pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self:", "CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType:", "GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int)", "General,searchText: str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable]", "General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" 
GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse)", "RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass", "pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\"", "SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\"", "__new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def", "\"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\"", "def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\"", "CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey])", "\"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber:", "HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\"", "\"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" 
pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound)", "PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\"", "-> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\"", "GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder)", "pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId):", "CacheKey,itemCode: str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId:", "General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def", "def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs)", "\"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda", "(int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass", "\"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: 
Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\"", "\"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args:", "AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool)", "\"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str", "GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType)", "PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg:", "Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams)", "GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\"", "def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\"", "def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" 
CreateNumberRange(self: NumberGeneration,dfObject:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda", "\"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int)", "instance of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self):", "MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with the specified System.Delegate to form", "RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\"", "\"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self:", "def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\"", "Delegate) -> Delegate Removes an element from the invocation list of this System.MulticastDelegate", "x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature", "Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs]", "GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs)", "\"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound)", "of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\"", "GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self:", "\"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def", "\"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\"", "from ..__init__ import * # no functions # classes class AppHost(object): \"\"\" AppHost()", "int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass", "-> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" 
GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\"", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate):", "Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass", "Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams)", "str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self:", "\"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\"", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\"", "GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) ->", "General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass", "boundary. A value of false is usually appropriate. true to copy the current", "\"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode:", "\"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter)", "-> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\"", "System.Object. Returns: A shallow copy of the current System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId):", "PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet) ->", "\"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self:", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general:", "def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args):", "self: None) class Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "specified delegate. value: The delegate to search for in the invocation list. Returns:", "contains the event data. 
\"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs", "\"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name:", "ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid])", "def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\"", "(int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass", "General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) ->", "of the current System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) ->", "-> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def", "'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\"", "object. context: The System.Runtime.Serialization.StreamingContext object. 
Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a", "pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self):", "\"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass", "(bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass", "def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines):", "General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) ->", "DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self:", "GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self:", "pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass", "pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: 
GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors):", "DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\"", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\"", "-> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self:", "RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\"", "NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\"", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls: type)", "pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders):", "\"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def", "pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId):", "\"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey)", "\"\"\" DownloadFileAsync(self: 
RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req:", "SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self:", "\"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self):", "General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset)", "def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\"", "type,general: General) \"\"\" pass class PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance", "Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) ->", "will cause remoting client calls to be routed to the remote server object.", "def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\"", "DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone]", "\"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self:", "\"\"\" 
GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int)", "bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs])", "\"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "x.__class__.__doc__ for signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\"", "\"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\"", "self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager:", "of the current System.Object. \"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self:", "type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self):", "list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args:", "GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification)", "-> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\"", "-> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool", "(bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\"", "None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v:", "def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\"", "pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def", "pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) -> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\"", "payload. 
DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. reader:", "\"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self:", "\"\"\"hardcoded/returns an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus)", "def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\"", "General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass", "current System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def", "DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass", "CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs)", "ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation:", "pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg):", "pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag):", "pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders):", "CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines])", "\"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> 
DataFlowObject[ReplenishmentOrder] \"\"\" pass def", "\"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey:", "GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int)", "\"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self:", "\"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory)", "\"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents):", "ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines])", "-> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\"", "Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def", "GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) ->", "\"\"\" pass def DeleteDevice(self,arg): \"\"\" 
DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def", "def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId):", "(bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass", "str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo:", "\"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext) ->", "table: The System.Data.DataTable being removed. \"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str)", "(int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass", "class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self:", "type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def", "that is about to change. 
\"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader)", "General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str", "GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) ->", "the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object.", "(int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def", "pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General)", "GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) ->", "DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs])", "def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\"", "ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ()", "name: The name of the property that is about to change. 
\"\"\" pass", "-> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\"", "GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self:", "AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass", "-> bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "\"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId:", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v:", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General)", "DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool", "None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self:", "instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass", "\"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def", "GetCounts(self: 
Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args:", "AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists):", "DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone]", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program", "\"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) ->", "-> str \"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\"", "(bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args):", "SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines)", "def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def", "-> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\"", "def PrintPrintLine(self,line,label): \"\"\" 
PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs):", "@staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass", "pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self):", "General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str])", "to be routed to the remote server object. Returns: A shallow copy of", "GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self:", "\"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def", "GetTagsAll(self: General) -> (int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) ->", "\"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self:", "the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus):", "Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\"", "pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def 
MemberwiseClone(self,*args): \"\"\"", "def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds):", "pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self:", "AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode:", "StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem)", "SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations:", "of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self:", "resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. 
disposing: true", "pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\"", "-> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\"", "during deserialization in remoting scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an", "GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str)", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound:", "str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) ->", "General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel)", "self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda", "(int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass", "GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass def 
GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self:", "\"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode:", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject):", "GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self:", "TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str)", "by the current delegate does not require arguments. 
Returns: The object returned by", "\"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self:", "str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\"", "-> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\"", "General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) ->", "-> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\"", "an instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class", "PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. 
pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data.", "def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self):", "this instance,then a new System.Delegate without value in its invocation list; otherwise,this instance", "@staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self:", "str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights)", "def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\"", "\"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound)", "DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript]", "str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) ->", "RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass class IExtendedServiceLocator: # no doc", "\"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def", "\"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None)", "def 
ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\"", "def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\"", "\"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod", "int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem])", "-> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def", "pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter:", "pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) ->", "by generator 1.145 # no doc # no important from System.Collections.Generic import *", "Publishers \"\"\" pass def 
InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def", "CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self:", "SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint:", "str,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) ->", "-> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation)", "\"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound)", "__new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class PyLogger(object): # no doc def", "self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda", "General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) ->", "None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v:", "\"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def", "pass def 
StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self:", "HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) ->", "Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str)", "BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool", "pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args):", "'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion',", "-> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\"", "copy of the current System.Object. 
\"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs)", "AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\" pass def", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance", "str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte]", "self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda", "\"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General)", "\"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda", "(bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId:", "\"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def", "\"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\"", "\"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self: None)", "BatchFilterArgs) 
-> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\"", "-> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass", "-> Delegate Removes an element from the invocation list of this System.MulticastDelegate that", "a System.Data.DataSet. reader: The System.Xml.XmlReader instance that is passed during deserialization of the", "pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def", "def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\"", "def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\"", "enumeration indicating whether schema information has been omitted from the payload. 
\"\"\" pass", "\"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current", "DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self:", "Inventory,countId: int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) ->", "\"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args:", "\"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args): \"\"\"", "the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def", "-> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\"", "DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass", "UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "change. 
\"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args):", "\"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot()", "GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self:", "General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\"", "Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object)", "str \"\"\" class Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the", "instance of the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod", "\"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def", "AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self:", "-> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\"", "\"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def", "str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: 
GetWarehouseLocationsArgs) ->", "-> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def", "def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\"", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda", "of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "@staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self:", "(int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass", "of the current System.MarshalByRefObject object. 
MemberwiseClone(self: object) -> object Creates a shallow copy", "-> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\"", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls:", "of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds:", "-> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass", "pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\"", "ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self:", "Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key:", "GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self:", "pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the 
format", "General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) ->", "\"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex:", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance", "pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds):", "pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def", "a System.Data.DataTable is removed from a System.Data.DataSet. table: The System.Data.DataTable being removed. \"\"\"", "\"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "\"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass def", "def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\"", "str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom:", "def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass def", "AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def", "pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages):", "str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\"", "-> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self:", "GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId:", "DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass", "PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo):", "ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass", "pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value):", "Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\"", "\"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" 
GetCountGroups(self: Inventory,filter:", "-> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) ->", "\"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str \"\"\" pass def GetModule(self,arg,module):", "IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General)", "Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg:", "pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions):", "-> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda", "pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def", "Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex:", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args): \"\"\"", "pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args):", "Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\"", "pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\"", "__new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass", "-> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\"", "(int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass def", "StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings: # no doc def", "-> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass", "ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages:", "\"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone):", "\"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass", "GetUsedPrintJobTypes(self): 
\"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue)", "pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder):", "\"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self:", "the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self:", "GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts)", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\"", "-> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass", "\"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\" SaveSetting(self:", "DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\"", "-> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\"", "-> List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\"", "release only unmanaged resources. 
\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda", "pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg):", "pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def", "\"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self:", "shallow copy of the current System.Object. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "(bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def", "class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "\"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification:", "List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def", "current System.Object. 
\"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\"", "Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) ->", "DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs]", "BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs)", "UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs])", "def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args:", "GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value:", "-> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool", 
"List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str)", "Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) ->", "\"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def", "pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self:", "\"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self:", "DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass", "\"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\"", "NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass", "class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an 
instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\"", "def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\"", "List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) -> List[Printer] \"\"\" pass def", "pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool", "\"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass", "def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning)", "PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\"", "GetUserCacheData(self: General,tag: str) -> str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) ->", "Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers)", "def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\"", "(int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass", "AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> 
DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs)", "\"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) ->", "None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self:", "SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License \"\"\" class", "pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\"", "-> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass", "\"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def", "str) -> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems)", "\"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self:", "pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" 
pass def", "General,xml: str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum", "AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self:", "pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\"", "Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) ->", "str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\"", "GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) ->", "str,key: str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass", "DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "\"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg:", "bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool", "DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent", "instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate", "str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom:", "StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag)", "Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs)", "\"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule)", "Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) \"\"\"", "NumberRange \"\"\" pass def 
GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass", "Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey \"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self:", "General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self):", "\"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def", "DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass", "\"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) ->", "pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks):", "-> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\"", "\"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id):", "General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\"", "-> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\"", 
"Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer", "-> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\"", "GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) ->", "GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) ->", "def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\"", "\"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def", "\"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General)", "Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) ->", "CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment])", "self: None) \"\"\"Gets the list of event handlers that are attached to this", "(int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) 
\"\"\" pass", "\"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self:", "None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v:", "(int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def", "DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\" pass", "\"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) ->", "Type) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\"", "doc # no important from System.Collections.Generic import * from ..__init__ import * #", "\"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self:", "\"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str)", "The streaming context. 
\"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType", "scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from", "General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\"", "\"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\"", "General,id: int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int)", "pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key):", "OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel)", "pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def", "DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs)", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self:", "\"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def", "CacheKey \"\"\" pass def 
PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey", "\"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) ->", "def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\"", "CacheKey) -> (bool,Batch) \"\"\" pass def GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch)", "Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\"", "Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType:", "-> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool", "DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass", "pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\"", "-> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\"", "PrintRmaReceipt(self: 
Inbound,groupGuid: Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice:", "GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id:", "CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self:", "Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder]", "IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self:", "Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int)", "GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) ->", "\"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\"", "\"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args:", "-> DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\"", "def 
CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self:", "\"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self:", "Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\"", "represented by the current delegate.-or- null,if the method represented by the current delegate", "GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) ->", "\"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound)", "pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\"", "def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\"", "\"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def", "\"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str", "def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): 
\"\"\"", "\"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\"", "List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\"", "\"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup]", "\"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self: None)", "GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool)", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object:", "def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[ 'Debug', 'Error', 'Fatal',", "self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda", "pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType):", "\"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass", "\"\"\"Get: 
PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None)", "bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) \"\"\"", "\"\"\" pass def GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def", "pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self:", "\"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\"", "invoked with during deserialization in remoting scenarios. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization", "RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "str \"\"\" class ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the", "\"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass", "-> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock])", "The System.Data.DataTable being removed. \"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends", "int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass", "DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int])", "DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass", "return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...)", "-> List[Operator] \"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\"", "PickItemIdRangeInBatch(self,dfObject): \"\"\" 
PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self:", "\"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName:", "ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass", "\"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject):", "def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\"", "\"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg:", "\"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass def", "\"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode:", "-> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\"", "for signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass", "pass def 
GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\"", "GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\"", "\"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass def", "__str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the list of", "\"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def", "-> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass", "DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass", "-> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\"", "str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\"", "pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\" pass", "\"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] 
\"\"\" pass def", "\"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key:", "DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self:", "Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations):", "LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates", "def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self:", "DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs]", "-> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during", "None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v:", "DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self:", "DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\"", "str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\"", "OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self:", "pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def", "\"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound)", "def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\"", "-> DataFlowObject[Tag] \"\"\" 
pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\"", "\"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def", "GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object", "int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\"", "SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items):", "\"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v:", "\"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str", "-> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass", "-> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\"", "pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\"", "# no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ()", "\"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def", "GetLocationsByCountGroup(self: 
General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification)", "\"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args):", "\"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str)", "GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\"", "(int,PreReceipts) \"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass", "ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self:", "\"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass", "IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def LoadCache(self):", "follow: The delegate to combine with this delegate. 
Returns: A delegate that is", "\"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\"", "CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel])", "\"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) ->", "GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\"", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls:", "AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\"", "GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self:", "\"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound)", "\"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName:", "\"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass def 
GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg:", "\"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def", "\"\"\" pass def GetVersion(self): \"\"\" GetVersion(self: General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse):", "\"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def", "GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self:", "\"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass def", "\"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args:", "DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass", "\"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args:", "\"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) ->", "-> PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str \"\"\" pass", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass class 
Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging:", "\"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs)", "Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex:", "General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId:", "NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) ->", "ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode:", "GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self:", "\"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey:", "\"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the", "General,userId: int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) ->", "OnGetDestinationLocationForLine) -> bool \"\"\" pass def 
ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder]", "RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\"", "CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent])", "NotificationSummary,executionType: str) -> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "\"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self:", "information has been omitted from the payload. 
DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines", "\"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda", "-> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass", "GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) ->", "GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) ->", "ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self:", "self,v: None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda", "Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass", "PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label:", "AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint:", "GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass def 
GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str)", "StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with", "GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items)", "self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda", "pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging):", "SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def", "CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns", "delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned", "General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) ->", "\"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self:", "-> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings)", 
"GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs)", "def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\"", "GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General)", "\"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None)", "Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line:", "\"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject:", "-> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass", "Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task", "\"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def", "the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId):", "remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. 
MemberwiseClone(self:", "SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory)", "\"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self:", "UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns", "GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType]", "IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return General()", "the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def", "-> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. 
cloneIdentity: false", "RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General)", "\"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self:", "see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None)", "PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self:", "GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries)", "pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args):", "GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General)", "GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self:", 
"PagingParams) -> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\"", "\"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents):", "str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) ->", "def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\"", "HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass", "BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod def", "Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self:", "\"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) ->", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls:", "instance of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None 
RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class", "Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams)", "General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() ->", "The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. Returns: true if the specified System.Runtime.Serialization.SerializationInfo", "\"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def", "\"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs)", "\"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode", "HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) ->", "\"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" 
DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) ->", "pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId):", "pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode", "str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool", "int) -> CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup", "(DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def", "-> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def", "GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode:", "\"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\"", "no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns", 
"\"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\"", "-> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def", "DataSet) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\"", "\"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\"", "(int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\"", "\"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str \"\"\" pass def", "(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}'", "def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): 
\"\"\"", "def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\"", "General) -> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str", "\"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self:", "pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\"", "type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "@staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self:", "about to change. 
\"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass", "def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self:", "-> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\"", "UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom:", "CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self:", "DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self:", "(bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def", "ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object):", "CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders)", "-> DataFlowObject[PickArgs] 
\"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\"", "(bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass", "UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "\"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass __all__=[", "DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self:", "pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def", "__new__(cls: type,general: General) \"\"\" pass class PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock", "pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General)", "GetChacheStatus(self: General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) ->", "def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines):", "Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> 
(DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey)", "A static method represented by the current System.MulticastDelegate. \"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\"", "DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) ->", "GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) ->", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls:", "\"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task):", "\"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script:", "(DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass", "\"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs)", "GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals)", "None,lambda self: 
None) class Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self:", "Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) ->", "-> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass", "General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key:", "General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock) \"\"\"", "\"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None)", "PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem]", "CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass", "ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) ->", "General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity)", "CacheKey,itemId: 
ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds:", "Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\"", "-> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass", "@staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self:", "int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\" pass", "\"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification:", "\"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def", "pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\"", "__new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration()", "shallow copy of the current System.Object. 
\"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId:", "FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\"", "StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite:", "def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\"", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self:", "GenerateNumbers(self,dfObject): \"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self:", "None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v:", "AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock)", "MethodInfo Returns a static method represented by the current System.MulticastDelegate. 
Returns: A static", "General,username: str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) ->", "ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType:", "General,key: CacheKey,line: PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs)", "pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self:", "self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda", "OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self):", "\"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter()", "-> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the", "\"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy:", "\"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable", "\"\"\"hardcoded/returns an instance of the 
class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) ->", "OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OnGetDestinationLocationForLine()", "pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups):", "pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference):", "def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\"", "\"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) ->", "\"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\"", "def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\"", "def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\"", "Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) ->", "DataFlowObject[DirectOrderLine] \"\"\" pass def 
AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass", "def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License \"\"\" #", "GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime", "\"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda", "GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid:", "GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str", "def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self:", "@staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg):", "return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self:", "pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> 
(int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages):", "the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data. \"\"\" pass", "\"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg:", "the specified delegate. value: The delegate to search for in the invocation list.", "CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) ->", "pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self):", "PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def", "\"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General)", "-> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool", "data from the binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. 
context: The", "__new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args):", "\"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass def", "GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes:", "ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey)", "UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\"", "(int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass", "\"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General) ->", "\"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self):", "str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users", "__all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> 
(int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items):", "the current System.Object. \"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]", "of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General)", "\"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self:", "General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass", "GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self:", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args):", "PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\"", "(bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass", "RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: 
IApplicationSettings) -> str \"\"\"", "-> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) ->", "AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\"", "def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. reader: The System.Xml.XmlReader instance that is passed during", "Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines:", "System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from the payload. 
DetermineSchemaSerializationMode(self:", "\"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid:", "pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy):", "\"\"\"hardcoded/returns an instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems)", "OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass", "'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment',", "\"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def", "GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\"", "DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass", "\"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def", "pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: 
Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages):", "Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "\"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def", "\"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject:", "IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings()", "pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass", "pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines):", "-> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass", "-> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int", "PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel)", "pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: 
List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self:", "Type,key: str) -> str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool", "Printing) -> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\"", "AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self):", "int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count):", "\"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script:", "Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) ->", "Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey:", "def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass def GetChacheStatus(self): \"\"\"", "CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str)", "def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: 
GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\"", "\"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args:", "ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}'", "RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs)", "new identity when it is marshaled across a remoting boundary. 
A value of", "General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\"", "str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) ->", "GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self:", "\"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject):", "PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems)", "signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass", "StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self):", "GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args:", "def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> 
CacheKey \"\"\" pass", "Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) ->", "class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inbound()", "DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey]", "pass def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self:", "-> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def", "-> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass", "CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self:", "AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self:", "pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass", "-> str \"\"\" class 
ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of", "\"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General)", "PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder", "def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self:", "InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) ->", "AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool", "-> Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\"", "-> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass", "Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking'", "copy of the current System.Object. 
\"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid)", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an", "pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def", "\"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass", "pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass", "x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self):", "GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\"", "DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def", "__new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None)", "GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) ->", "pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: 
Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line):", "PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass", "CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\"", "GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self:", "def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda", "pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications):", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "for a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext)", "\"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args:", "A value of false is usually appropriate. true to copy the current System.MarshalByRefObject", "return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self:", "-> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\"", "class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no doc def", "str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo:", "CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass", "pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args):", "pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def", "\"\"\" GetWarehouseLocationExists(self: 
General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self:", "the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" class Messaging(MarshalByRefObject):", "ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args:", "invoked with during deserialization in remoting scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether", "pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args):", "current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the", "General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) ->", "pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets):", "pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\"", "\"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self:", "Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass 
def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) ->", "str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines)", "FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException:", "bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool", "ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) ->", "CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def", "IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass", "def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\"", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "InboundOrderTypeEnum) -> 
bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass", "int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) ->", "SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey)", "return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self:", "\"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self:", "Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) ->", "\"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse]", "General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\"", "GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self:", "PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment", "pass 
@staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\"", "pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "\"\"\"hardcoded/mock instance of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\"", "\"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "System.Object. \"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass", "OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey)", "return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self:", "__repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) ->", "-> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> 
(bool,TransportItems,TransportPackages)", "the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args):", "Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool", "DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey)", "Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) ->", "Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode:", "str) -> object \"\"\" pass def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object", "false to release only unmanaged resources. 
\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet)", "\"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass", "-> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet)", "bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\"", "IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self:", "DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification])", "General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self:", "\"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders):", "\"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass", "AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self:", "(bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: 
Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location)", "the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddUsedNumber(self,args):", "DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key:", "pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by", "GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs)", "ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) ->", "pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass def EnsureLicenseExists(self):", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls:", "pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\"", "see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass def", "self: None) \"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self:", "\"\"\" GetChacheStatus(self: General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int)", "def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an element from the", "(int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def", "ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self:", "\"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def", "General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode:", "\"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass", "pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda", "DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> 
DataFlowObject[object]", "-> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\"", "MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns:", "General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool", "CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\"", "(int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\"", "AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity)", "Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\"", "\"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str", "routed to the remote server object. 
Returns: A shallow copy of the current", "pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations):", "-> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass", "CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine])", "ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId:", "pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General)", "AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists):", "-> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\"", "\"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass", "\"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: 
IApplicationSettings) -> str", "def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self):", "CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch)", "List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass", "GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) ->", "(bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self:", "-> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass", "(bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass", "\"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass def", "IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def 
IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\"", "\"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def", "\"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes:", "General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str", "pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey", "str,orderNumber: str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs)", "-> bool Inspects the format of the serialized representation of the DataSet. 
info:", "pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) ->", "int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User)", "StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool)", "\"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def", "RemotePublishing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req:", "def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by the", "pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass", "def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message):", "(bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass", "System.Runtime.Serialization.SerializationInfo object. 
context: The System.Runtime.Serialization.StreamingContext object. Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents", "a new delegate. follow: The delegate to combine with this delegate. Returns: A", "\"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses):", "ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult", "Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) ->", "\"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the", "GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str)", "pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\"", "DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass", "RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key:", "pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def 
GetBatchesIncompleteByFilter(self,args,batches): \"\"\"", "\"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter:", "\"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass", "Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode:", "a notification that the specified System.Data.DataSet property is about to change. name: The", "\"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass def", "def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def", "General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) ->", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) ->", "str) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\"", "def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\"", "self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda", "ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent]", "PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) ->", "None) \"\"\"Gets the list of event handlers that are 
attached to this component.", "-> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass", "A shallow copy of the current System.Object. \"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self:", "str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines)", "-> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\"", "ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self:", "DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the format of the serialized representation of", "DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine]", "pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self: General)", "\"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def", "\"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey):", "def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" 
ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def", "General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass", "\"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def", "pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines):", "GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy:", "\"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def", "pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self:", "ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch:", "General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns", "# classes class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def 
PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\"", "General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\"", "\"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def", "\"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance", "-> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications)", "ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs)", "-> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\"", "GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId:", "WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey)", "Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete", "DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject):", "-> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService])", "\"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) ->", "DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool", "self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None", "def UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "\"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber:", "of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "-> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\"", "\"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs)", "RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse):", "in the invocation list. 
Returns: If value is found in the invocation list", "pass @staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass", "DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) ->", "-> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass", "GetMacAddress(self: General) -> str \"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) ->", "OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is removed from", "\"\"\" GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key:", "-> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None)", "\"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\"", "the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. 
disposing: true to release both", "\"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def", "DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass", "str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) ->", "copy of the current System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str)", "AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self:", "\"\"\"hardcoded/mock instance of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "\"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) ->", "def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\"", "cause the object to be assigned a new identity when it is marshaled", "DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass", "\"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass", "represented by the delegate. 
\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) ->", "\"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def", "GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() ->", "def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary)", "(int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def", "OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type:", "GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment:", "A shallow copy of the current System.Object. 
\"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) ->", "BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "\"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self:", "def UpdatePreReceiptStatus(self,dfObject): \"\"\" UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\"", "-> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications)", "ErpLock) \"\"\" pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass", "\"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def", "Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey)", "DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> 
bool \"\"\"", "NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup):", "def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order):", "GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self:", "General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\"", "Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key:", "pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self:", "__reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "-> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool", "GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args:", "-> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" 
MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates", "\"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) ->", "GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self:", "\"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId):", "-> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass", "remoting boundary. A value of false is usually appropriate. true to copy the", "DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) ->", "CountFilter,type: CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass", "AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) \"\"\" pass", "UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self:", "def GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str \"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self:", "resources. 
\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def", "PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str)", "def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\"", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging:", "\"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject:", "(int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\"", "\"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\"", "-> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\"", "GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams:", 
"DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup])", "object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate", "current System.MulticastDelegate. \"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str", "pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self:", "The delegate to search for in the invocation list. Returns: If value is", "def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\"", "@staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self:", "General,arg: ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) ->", "int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass", "DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass", "return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self:", "General,memberName: str,value: object) \"\"\" pass def 
SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass", "\"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings():", "\"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\"", "pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message):", "GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs)", "\"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations):", "XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
reader: The System.Xml.XmlReader instance", "def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\"", "ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass", "(int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\"", "str) -> str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\"", "SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda", "-> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass", "PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self:", "str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device)", "NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "\"\"\" IsProfilerRunning(self: General) -> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation])", "Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" 
GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders)", "CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args:", "\"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id:", "General,command: str) -> str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) ->", "OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\"", "RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "\"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent:", "pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def", "\"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def", "pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones):", "signature \"\"\" pass @staticmethod def 
__new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\"", "the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def", "\"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args:", "DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass", "NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId):", "specified System.Delegate to form a new delegate. follow: The delegate to combine with", "Returns: A shallow copy of the current System.Object. 
\"\"\" pass def ResetNumberRange(self,dfObject): \"\"\"", "\"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self: object(),lambda self,v: None,lambda self: None)", "\"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def", "def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def CreateShipperServiceLink(self,arg): \"\"\"", "\"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def", "GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue) ->", "-> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self:", "DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) ->", "Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass", "RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args:", "PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self:", "instance of the class\"\"\" def 
AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def", "DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass", "DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) ->", "\"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req:", "\"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def", "def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\"", "\"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def", "str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass", "PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode:", "Outbound,shipmentPk: int) -> 
(int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) ->", "-> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) ->", "\"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int)", "PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject):", "CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode:", "InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey)", "GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) ->", "format,false otherwise. 
\"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs)", "def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\"", "int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) ->", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self:", "StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass", "System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from the payload. 
\"\"\"", "(bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) ->", "(int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass", "(int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def", "\"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args:", "\"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id):", "\"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def", "def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\"", "UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self:", "ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str)", "\"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" 
GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks):", "PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date:", "Outbound,id: int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) ->", "GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "(DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass", "General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int)", "\"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def ReissueMessages(self,messageIds): \"\"\"", "bool Inspects the format of the serialized representation of the DataSet. 
info: The", "GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) ->", "Guid,newStatus: MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def", "List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass", "\"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self:", "def SaveTranslations(self,translations): \"\"\" SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General)", "class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) -> License", "\"\"\" pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def", "DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str)", "(int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def", "\"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args):", "pass def 
ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\" pass def", "OfflineScanning) -> LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\"", "DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) ->", "-> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass", "def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args:", "\"\"\"hardcoded/mock instance of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def", "int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) ->", "\"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls:", "delegate. 
Returns: A delegate that is the new root of the System.MulticastDelegate invocation", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer)", "\"\"\" CreateDatabase(self: General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device])", "(bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature", "\"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str", "pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def", "BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass", "GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs)", "GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs)", "pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" 
CreatePreReceipt(self:", "def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self:", "true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause", "GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self:", "# module Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145", "AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult", "GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args:", "-> bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) ->", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self:", "DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass", "MessageStatus) \"\"\" pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId):", "(bool,Array[DateTime],DateTime) \"\"\" pass def 
GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\" pass def", "DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "the current System.Object. \"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir:", "pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\"", "Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory) -> object", "pass def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count)", "pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass def", "General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool", "for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue)", "Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) ->", "def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> 
(DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\"", "\"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) ->", "IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self:", "Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\"", "class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\"", "\"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool", "PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging) -> object \"\"\"", "current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will", "CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\"", "Returns: A shallow copy of the current System.Object. 
\"\"\" pass def ReissueMessage(self,messageId): \"\"\"", "GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) ->", "\"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries):", "\"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey:", "Printing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass", "-> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\" pass", "(int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass", "\"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) ->", "def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self:", "\"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\"", "CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: 
DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass", "str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass", "object(),lambda self,v: None,lambda self: None) class Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self):", "DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self):", "pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass", "\"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self:", "General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink)", "both managed and unmanaged resources; false to release only unmanaged resources. \"\"\" pass", "General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\"", "the System.Data.DataSet. 
Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted", "\"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult", "-> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a", "return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self:", "DataFlowObject[CacheKey] \"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass", "pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\"", "OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass", "def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self:", "DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock: ErpLock)", "-> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass", "self,v: None,lambda self: None) \"\"\"Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\"", "MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject", "ReportsPickListsFolder='C:\\\\Program Files 
(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}'", "\"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod def Trace(msg):", "def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args:", "\"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber:", "Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) ->", "(bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\"", "IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self:", "\"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: 
OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs)", "pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone:", "DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders]", "ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda", "General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\"", "pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\"", "bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) ->", "Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) ->", "CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) ->", "General,zone: Zone,user: User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) ->", "\"\"\" DeleteRemotePublisher(self: RemotePublishing,req: 
DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) ->", "GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\" pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) ->", "List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass", "(int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass", "\"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\" pass", "\"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) ->", "__new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda", "RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass", "General,target: TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations)", "\"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def 
CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\"", "object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass", "-> LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass", "DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General)", "-> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def", "DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\" pass", "\"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass def", "\"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "-> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\"", "GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str)", "UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[", "CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def", "pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args):", "Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult", "ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem)", "-> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\"", "class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr):", "-> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\"", "def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\"", "Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> 
DataFlowObject[bool] \"\"\"", "CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset])", "def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\"", "of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" class", "def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self:", "\"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self:", "\"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass", "PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\"", "BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def", "\"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def", "None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self:", "GetDefaultBatchSink(): 
\"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs)", "def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\"", "IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self:", "PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self:", "\"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\"", "MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object.", "GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self:", "CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item):", "MulticastDelegate) -> MethodInfo Returns a static method represented by the current System.MulticastDelegate. 
Returns:", "def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) ->", "General,endPoint: str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) ->", "current System.Object. \"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\"", "BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions()", "def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args:", "Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial'", "server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object)", "object returned by the method represented by the delegate. 
\"\"\" pass def EndInvoke(self,result):", "def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\"", "Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs])", "def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex:", "GetItemStockTotalsArgs) -> (bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents)", "(BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def", "across a remoting boundary. A value of false is usually appropriate. true to", "(bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass", "\"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self:", "System.Object. Returns: A shallow copy of the current System.Object. 
\"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject):", "List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass", "(OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass", "of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2", "\"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int", "PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool", "\"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General)", "General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) ->", "\"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg:", "-> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass", "\"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject):", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count])", "-> (int,Users) \"\"\" pass def 
GetVersion(self): \"\"\" GetVersion(self: General) -> str \"\"\" pass", "GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id:", "def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\"", "(int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def", "-> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\"", "omitted from the payload. \"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases", "def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\"", "GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging:", "int,itemId: str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem)", "def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath:", "-> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: 
General,arg: ModuleArgs) -> bool \"\"\"", "def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\"", "-> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\"", "pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args):", "(bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str)", "CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) ->", "of the current System.Object. 
\"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) ->", "x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\" pass def IsNumberUsed(self,args): \"\"\"", "PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self:", "bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def", "-> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\"", "Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) ->", "def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\"", "\"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod", "\"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def GetCounts(self,filterBy,pagingParams,counts): \"\"\" GetCounts(self: Inventory,filterBy:", 
"object(),lambda self,v: None,lambda self: None) \"\"\"Gets the list of event handlers that are", "GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\"", "Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\"", "\"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self:", "Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) ->", "object to be assigned a new identity when it is marshaled across a", "pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self: General)", "(bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass", "General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes)", "Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def", "RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch)", "\"\"\" pass def 
StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def", "RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an element from the invocation list of", "\"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args:", "def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\"", "Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) ->", "def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass", "Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) ->", "GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) ->", "an instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass", "optionally releases the managed resources. 
disposing: true to release both managed and unmanaged", "\"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\"", "pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\"", "List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs)", "SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self:", "# no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ()", "\"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent):", "GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self: General)", "IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self):", "IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "__new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v:", "General,arg: ModuleArgs) -> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: 
DataFlowObject[ItemIdGenerateArgs]) ->", "pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def HasNotifications(self,filterOn):", "\"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder):", "shallow copy of the current System.Object. Returns: A shallow copy of the current", "General) -> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation]", "DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders):", "\"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services):", "def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self:", "General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def", "pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId):", "TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject:", 
"System.Delegate with the specified System.Delegate to form a new delegate. follow: The delegate", "PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines:", "def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\"", "schema information has been omitted from the payload. DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode", "def DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\"", "Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass", "\"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) ->", "\"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data", "DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self:", "GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) ->", "UserName(self: CallerContext) -> str \"\"\" class Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock", "def 
ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\"", "pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location):", "LicensePlate) -> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog", "bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass", "\"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass", "streaming context. 
\"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\"", "def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass def GetVersion(self): \"\"\"", "PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) ->", "type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self):", "Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) ->", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) ->", "-> object \"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool \"\"\" pass", "pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def GenerateNumbers(self,dfObject):", "General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def", "DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> 
int \"\"\" pass", "def Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg: str)", "def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def", "\"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass", "delegate that is the new root of the System.MulticastDelegate invocation list. \"\"\" pass", "int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str", "bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\"", "SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId:", "the current System.MulticastDelegate. 
\"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) ->", "AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\"", "pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg):", "str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self:", "General) \"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\"", "pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args):", "pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings):", "the System.MulticastDelegate invocation list. 
\"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) ->", "\"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the list of event handlers that", "NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration()", "ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) ->", "pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\"", "of the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject):", "DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__", "GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs)", "def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\"", "def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\"", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls:", "General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans:", "GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str)", "int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines)", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda", "the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary format,false otherwise. 
\"\"\"", "AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass", "\"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self:", "\"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\"", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object \"\"\" pass def IsProfilerRunning(self): \"\"\"", "General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) ->", "\"\"\" ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey:", "class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\"", "object \"\"\" pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass", "instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object)", "-> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\"", "\"\"\" 
GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint:", "str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str])", "\"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId:", "AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General) \"\"\" pass", "AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self:", "pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\"", "\"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus:", "\"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Messaging() instance=ZZZ()", 
"OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self:", "XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. \"\"\" pass @staticmethod", "\"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg:", "RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass", "GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\" GetVendors(self:", "identity,which will cause the object to be assigned a new identity when it", "\"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self:", "System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. 
Returns: An System.Data.SchemaSerializationMode enumeration", "int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) ->", "def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\"", "\"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors)", "object's identity to its clone,which will cause remoting client calls to be routed", "scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. 
Returns: An System.Data.SchemaSerializationMode", "-> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) \"\"\"", "pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg):", "SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass", "None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager", "-> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\"", "class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock):", "\"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet:", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer):", "return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...)", "\"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def", "signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class", "Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs)", "AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args):", "Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing)", "notification that the specified System.Data.DataSet property is about to change. 
name: The name", "Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) \"\"\" pass def GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs)", "Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\"", "SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers:", "DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int)", "GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) \"\"\" pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) ->", "about to change. 
name: The name of the property that is about to", "\"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args:", "pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\"", "\"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId):", "ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass", "OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo", "\"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self:", "GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args:", "\"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\"", "CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders):", "pass def 
GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList):", "BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings:", "Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) ->", "Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inventory()", "GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams:", "Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\"", "pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock):", "str,itemId: str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str])", "pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey) \"\"\" pass def 
DisposeCachedObject(self,hashCode): \"\"\" DisposeCachedObject(self:", "a new System.Delegate without value in its invocation list; otherwise,this instance with its", "General,lock: ErpLock) -> int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass", "def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self:", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging:", "GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass def GetPrintersTable(self): \"\"\" GetPrintersTable(self: General)", "AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\" AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) \"\"\" pass", "DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User])", "CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation:", "DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the method represented", "(int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass", "def PrintDocumentsOfShipment(self,args): \"\"\" 
PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\"", "OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass", "copy of the current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange])", "GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) ->", "\"\"\"hardcoded/returns an instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool", "of event handlers that are attached to this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda", "pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): \"\"\"", "\"\"\" pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def", "\"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str)", "PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" 
DocumentQueue(printingService:", "LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str)", "-> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\"", "bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\"", "that are the arguments to pass to the method represented by the current", "pass def SaveCache(self): \"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self: General,warehouse:", "System.Data.DataTable is removed from a System.Data.DataSet. table: The System.Data.DataTable being removed. 
\"\"\" pass", "def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass", "def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders:", "GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) \"\"\" pass def GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages)", "\"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\"", "str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte]", "\"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def", "General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command):", "-> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\"", "an instance of the class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass", "\"\"\" pass def 
PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey", "pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\"", "-> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass", "\"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self: General,name: str) -> (bool,Device) \"\"\" pass def", "SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) ->", "General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase])", "for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging)", "HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\"", "PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass", "\"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) \"\"\" pass def", 
"has been omitted from the payload. DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the", "def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self:", "CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self:", "LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass", "ICentralAuthoritySystem) -> bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\"", "copy of the current System.Object. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs)", "removed from a System.Data.DataSet. table: The System.Data.DataTable being removed. 
\"\"\" pass def RaisePropertyChanging(self,*args):", "pass class PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\"", "GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode:", "DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User]", "\"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self:", "Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity:", "\"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def", "AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId:", "-> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass", "def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self:", "\"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda", "InitializeLifetimeService(self: 
Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator", "GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\"", "IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the format of the serialized representation", "pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg):", "General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def", "-> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message):", "Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) ->", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self:", "-> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() -> str \"\"\" pass", "None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v:", "of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}'", "CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self:", "\"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def", "DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange]", "\"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def", "\"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self):", "the current 
System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder)", "copy of the current System.Object. \"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration)", "method represented by the current System.MulticastDelegate. Returns: A static method represented by the", "ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject):", "(bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations)", "\"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def", "def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions:", "\"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) ->", "Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors)", "int,blobName: str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) ->", "Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def 
__init__(self,*args): \"\"\" x.__init__(...) initializes", "pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args):", "class ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass", "pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone):", "str) -> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool", "\"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def", "\"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\"", "CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General) ->", "(int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass", "UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def 
ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey:", "def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command:", "\"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid:", "\"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\"", "General,user: User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones)", "\"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def", "CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str)", "\"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool", "Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\"", "GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) ->", "pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: 
str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def", "-> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications)", "Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from the", "an instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def", "pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject):", "pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\"", "signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\"", "property is about to change. name: The name of the property that is", "MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool \"\"\" pass def", "-> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder]", "General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str)", "__new__(cls: type,container: IUnityContainer) \"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\"", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self:", "AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers:", "\"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str)", "str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) ->", "Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int)", "System.ComponentModel.PropertyChangedEventArgs that contains the event data. 
\"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation:", "RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the specified System.Data.DataSet property", "pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self):", "GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self:", "GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) ->", "doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an", "def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\"", "General,module: PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str])", "represented by the current System.MulticastDelegate. 
\"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound:", "\"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args:", "\"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def", "\"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "\"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self:", "int,packages: TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str)", "cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object", "-> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\"", "def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\"", "GetPrintersTable(self): \"\"\" GetPrintersTable(self: General) -> Hashtable \"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" 
GetPrintLabelByName(self: General,name:", "(bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\"", "GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams:", "\"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def", "pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass", "GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey:", "Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\"", "General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock:", "ICentralAuthoritySystem) -> bool \"\"\" pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def", "class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x;", "-> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\"", "pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def", "pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user):", "GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter)", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self:", "\"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\"", "-> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\"", "-> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass", "str,toDir: str) -> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass", "class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: 
General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg):", "is passed during deserialization of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether", "\"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str)", "KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) ->", "of the class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass", "\"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass", "def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\"", "SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def", "def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self:", "\"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value:", "class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\"", "Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\"", "GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self:", "DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine]", "\"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass", "BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass", "\"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self: General,arg:", "GetHistoryOutboundOrderCustomers(self: 
Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs)", "Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey:", "def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\"", "GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) ->", "def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses): \"\"\" GetWarehousesAll(self:", "General,arg: ModuleArgs) -> (bool,PythonModule) \"\"\" pass def GetPendingPrintLineCount(self,key): \"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) ->", "NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def", "\"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId:", "License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\"", "Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders)", "CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self:", "self,v: None,lambda self: None) \"\"\"Get: RpRestBaseUri(self: IApplicationSettings) -> str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda", "\"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer]", "pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self:", "pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\"", "\"\"\" pass def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self:", "pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args):", "UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo:", "\"\"\"Get: 
UserName(self: CallerContext) -> str \"\"\" class Constants(object): # no doc def ZZZ(self):", "\"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\"", "GetNotificationsArgs) -> List[Notification] \"\"\" pass def HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool", "GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self:", "RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the specified System.Data.DataSet property is about", "an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool)", "NumberGeneration() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns", "Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) ->", "Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) ->", "GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General)", "__all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 
'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName',", "None,lambda self: None) \"\"\"Get: MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v:", "def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\"", "def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\"", "RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\"", "Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\"", "str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo:", "\"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass def", "IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return General() instance=ZZZ()", "\"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def", "None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" 
MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self:", "GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\" pass def GetCount(self,*__args): \"\"\" GetCount(self:", "PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self:", "def GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\"", "\"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def", "ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self:", "UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "\"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result):", "-> int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self):", "\"\"\" CreateTag(self: General,arg: 
DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg): \"\"\" CreateUser(self: General,arg:", "Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\"", "pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject):", "DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self:", "CurrentLicense(self: OfflineScanning) -> License \"\"\" class OnGetDestinationLocationForLine(MulticastDelegate): \"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def", "GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self:", "IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self:", "def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer)", "def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def", "\"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: 
NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\"", "pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "str,countGroupId: int,itemId: str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item:", "str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs])", "the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\"", "GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self:", "Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) ->", "the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddNotification(self,notificationToInsert):", "General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs)", "pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) 
\"\"\" pass def", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance", "-> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\"", "type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None)", "\"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message:", "GetWarehouseExists(self,warehouseCode): \"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self:", "instance of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from the binary or", "-> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\"", "pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key):", "DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed from a System.Data.DataSet. 
table: The", "NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self):", "pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed", "pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines):", "def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\"", "\"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args:", "return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self:", "(int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def", "CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "\"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass def", "pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\"", "DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass", "pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines):", "bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\"", "pass def GetDeviceInformation(self,endPoint,deviceInfo): \"\"\" GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices):", "def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\"", "WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def", "def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\"", "def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe:", "\"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg:", "A shallow 
copy of the current System.Object. \"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self:", "LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation):", "AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\"", "\"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def", "def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None)", "pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor:", "int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) ->", "__new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper()", "from System.Collections.Generic import * from ..__init__ import * # no functions # classes", "pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\"", "CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" 
CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs])", "pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\" pass def EnsureLicenseExists(self):", "KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def LoadCache(self): \"\"\"", "-> bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg):", "\"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId:", "str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\" pass", "\"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) ->", "def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet)", "\"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def", "def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\"", "DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass", "initializes x; see x.__class__.__doc__ for 
signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls:", "the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self): \"\"\"", "\"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label):", "of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been", "def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\"", "\"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey", "GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) ->", "\"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def", "str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations)", "RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None 
RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer:", "Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass", "General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\" StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def", "CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self: General,script: ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self): \"\"\"", "pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag):", "def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\"", "DataTable) Occurs when a System.Data.DataTable is removed from a System.Data.DataSet. table: The System.Data.DataTable", "A shallow copy of the current System.Object. 
\"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self:", "def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda", "Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) ->", "General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals): \"\"\" GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) ->", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def", "def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] \"\"\" pass def ReceiveItemIdRange(self,dfObject): \"\"\"", "GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable])", "pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\"", "of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\"", "SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass def SaveSetting(self,memberName,value): \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value", "pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo):", "\"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed from a System.Data.DataSet.", "CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask])", "def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\"", "pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch):", "def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg:", "\"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def 
GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) ->", "pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self):", "(int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings:", "SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass def StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken)", "the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass", "def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\"", "pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\"", "pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self:", "str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\"", 
"General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode: str) -> (bool,Item)", "def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\"", "\"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def", "\"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink):", "int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent):", "str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def", "see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see", "General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations)", "the table data from the binary or XML stream. 
info: The System.Runtime.Serialization.SerializationInfo instance.", "ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs)", "SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y:", "GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) ->", "the format of the serialized representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object.", "\"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) ->", "GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) ->", "bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\"", "AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs)", "GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args:", "Mappings[str,str,str]) -> bool \"\"\" pass def 
SaveSetting(self,memberName,value): \"\"\" SaveSetting(self: General,memberName: str,value: object) \"\"\"", "DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass", "def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\"", "\"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) ->", "DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass", "AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\"", "str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode:", "\"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def", "-> FindableList[ItemStockWithLocations] \"\"\" pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass", "\"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): 
\"\"\" DeletePrintJobs(self: DocumentQueue,jobIds:", "info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. Returns: true if the specified", "cause remoting client calls to be routed to the remote server object. Returns:", "CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\"", "-> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass", "the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args):", "GetVersion(self: General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) ->", "x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass", "NotificationSummary) -> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\"", "pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def", "pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self:", "self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda", "GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) ->", "DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int)", "GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int)", "UpdateMessage(self,message): \"\"\" UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "System.Data.DataSet property is about to change. 
name: The name of the property that", "\"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs)", "CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda", "DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self:", "IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self:", "ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass", "-> List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass", "DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\"", "GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet])", "pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\"", "List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def 
PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes:", "def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self:", "Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs)", "def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def GetMessageHandlers(self,args,messageHandlers):", "\"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args:", "for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\"", "-> PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "-> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings)", "def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the list", "UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference:", "GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\"", "Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject:", "AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange])", "\"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\"", "__new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def", "GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str \"\"\"", "method represented by the current System.MulticastDelegate. 
\"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line:", "class\"\"\" def AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName):", "Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass", "AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def", "-> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\"", "str \"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self: CallerContext)", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self:", "Zone) -> (bool,RemotingIdentity) \"\"\" pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass", "\"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Mailer() instance=ZZZ()", "General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode:", "(int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass", "def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args): \"\"\"", 
"GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot()", "GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification)", "GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str])", "pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self: General,name: str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights):", "\"\"\" pass def GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self):", "Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService])", "IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self:", "DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass", "General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone]) ->", "removed. \"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification that", "a remoting boundary. A value of false is usually appropriate. 
true to copy", "RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\" pass", "def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\"", "General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) ->", "\"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\"", "Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) ->", "pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation):", "\"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg:", "bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\"", "System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
reader: The System.Xml.XmlReader instance that is passed during deserialization", "def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\"", "def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\"", "(int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def", "\"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def", "\"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\" pass def", "\"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self):", "General,name: str) -> (bool,Zone) \"\"\" pass def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) ->", "pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self:", "General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\"", "InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning)", "GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def 
GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory)", "def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\"", "CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey)", "def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self:", "SerializationInfo,context: StreamingContext) -> bool Inspects the format of the serialized representation of the", "Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) ->", "\"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key:", "List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass", "\"\"\" pass def GetPrinterRules(self,args): \"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def", "the new root of the System.MulticastDelegate invocation list. 
\"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\"", "DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine]", "PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs])", "int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool)", "\"\"\" pass def GetPrintLabelByName(self,name,label): \"\"\" GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) \"\"\" pass def", "(int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\" pass def DeleteBatches(self,batchesToDelete):", "General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\"", "Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self:", "\"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress:", "\"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\"", "(int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def", "-> Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self):", "return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def Debug(msg): \"\"\"", "Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch',", "GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode:", "RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing()", "int) \"\"\" pass def DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order):", "General,value: str,expectedScan: ExpectScanOfEnum) -> 
(BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) ->", "of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\"", "General,filter: str) \"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args):", "invocation list for this instance,then a new System.Delegate without value in its invocation", "-> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings)", "Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches)", "of the serialized representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The", "GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter:", "GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\"", "General,accessId: str) -> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) ->", "pass def DeleteScript(self,arg): \"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg):", "pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines):", "PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def 
GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) ->", "\"\"\" pass def SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key: str) \"\"\" pass def SendMessage(self,endPoint,message):", "General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript)", "\"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey", "def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self:", "AddPrintJob(self,args): \"\"\" AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self:", "def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\"", "def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\"", "\"\"\" pass def SaveCache(self): 
\"\"\" SaveCache(self: General) \"\"\" pass def SaveDefaultInboundLocation(self,warehouse): \"\"\" SaveDefaultInboundLocation(self:", "General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) ->", "str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders)", "import * from ..__init__ import * # no functions # classes class AppHost(object):", "str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str)", "-> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\"", "\"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self:", "\"\"\" pass def AddDirectOrderLineItemIdentification(self,args): \"\"\" AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def", "\"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers \"\"\" pass def SendKey(self,endPoint,key):", "\"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int)", "def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self:", "self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License \"\"\" # variables with complex values", "\"\"\" __repr__(self: object) -> str \"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key:", "\"\"\"hardcoded/returns an instance of the class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet", "pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\" pass def", "calls to be routed to the remote server object. Returns: A shallow copy", "General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs)", "pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines):", "only unmanaged resources. \"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\"", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,object,method):", "\"\"\"hardcoded/mock instance of the class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue)", "Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) ->", "DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey)", "..__init__ import * # no functions # classes class AppHost(object): \"\"\" AppHost() \"\"\"", "__new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda", "pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def", "str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal)", "DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass", "list for this instance,then a new System.Delegate without value in its invocation list;", "PrintSSCCLabels(self: Printing,dfObject: 
DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int)", "AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General)", "pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args):", "class IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "-> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications)", "GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams:", "GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) ->", "\"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\" pass", "GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def 
GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self:", "(int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def", "is invoked with during deserialization in remoting scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating", "GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\"", "CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items:", "(int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\"", "DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\" pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference:", "Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\"", "\"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def", "\"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def", 
"def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args:", "CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) ->", "\"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key:", "def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str)", "\"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def", "General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers):", "None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self:", "Type) -> str \"\"\" pass def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str)", "def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\"", "CreateReplenishmentOrder(self,order): \"\"\" CreateReplenishmentOrder(self: 
Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self:", "General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) ->", "def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self):", "Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) ->", "PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs])", "pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass def GetVersion(self):", "-> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages)", "GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args:", "current System.MulticastDelegate. Returns: A static method represented by the current System.MulticastDelegate. 
\"\"\" pass", "Outbound,batch: Batch) -> Batch \"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value:", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance", "GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList)", "current System.Object. \"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\" pass def", "Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key:", "instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass", "SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns an instance", "GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod def", "def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\"", "def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) 
Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs", "None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self:", "DeleteNotification(self: NotificationCenter,notificationId: int) \"\"\" pass def DeleteNotificationGroup(self,notificationGroup): \"\"\" DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) \"\"\" pass", "'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot',", "def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates):", "str) Sends a notification that the specified System.Data.DataSet property is about to change.", "GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\"", "def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\"", "\"\"\"hardcoded/mock instance of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "(int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass", "RemotingIdentity) -> bool \"\"\" pass @staticmethod def 
WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException", "Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order:", "the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting", "int) -> str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\"", "-> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones)", "class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate \"\"\" pass def", "def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def", "__exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args):", "CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs)", "DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def", "\"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass", "CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\"", "DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass", "GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self:", "def WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container:", "\"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass", "\"\"\" 
GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) ->", "pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self:", "def LogoutClient(self): \"\"\" LogoutClient(self: General) \"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\"", "GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs:", "\"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) ->", "(Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) ->", "PrintDuplicateLabelArgs) -> bool \"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool", "specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary format,false otherwise. \"\"\" pass", "-> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def", "class\"\"\" @staticmethod def Debug(msg): \"\"\" Debug(msg: str) \"\"\" pass @staticmethod def Error(*__args): \"\"\"", "that are attached to this component. 
\"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self:", "def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult \"\"\"", "str \"\"\" RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) ->", "def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\" CreateLicensePlate(self: Inventory,lp:", "\"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str)", "pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors):", "bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\"", "-> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications \"\"\"", "GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id:", "self,v: None,lambda self: None) class IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance", "\"\"\" DisposeCachedObject(self: 
General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General)", "DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask])", "StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\" pass", "form a new delegate. follow: The delegate to combine with this delegate. Returns:", "DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count])", "ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance", "\"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def", "\"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self:", "List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass", "\"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None)", "def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def 
SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question:", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an", "instance of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def", "shallow copy of the current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject:", "GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs)", "InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool", "\"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\"", "BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance", "class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass def DoGetInstance(self,*args):", "DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device])", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def 
MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\" pass def ExecuteScript(self,script): \"\"\"", "\"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "-> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass", "DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def DeleteZone(self,arg): \"\"\" DeleteZone(self: General,arg: DataFlowObject[Zone])", "\"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no doc def ZZZ(self):", "Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: Outbound,id: str) -> bool \"\"\"", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) -> object \"\"\" pass def MemberwiseClone(self,*args):", "\"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\" pass def", "return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\"", "\"\"\" CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line:", "DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> 
DataFlowObject[ScriptTask]", "Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\"", "for signature \"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\" pass", "GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs])", "\"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def", "IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) \"\"\"", "GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs)", "pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def __enter__(self,*args): \"\"\"", "(int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass", "str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\"", "-> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" 
GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\"", "OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\"", "Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Printing() instance=ZZZ()", "and optionally releases the managed resources. disposing: true to release both managed and", "RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "\"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) ->", "str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages)", "def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions):", "(int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\"", "GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self:", "CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: 
Inventory,arg: DataFlowObject[Count])", "General) \"\"\" pass def CompileScript(self,script): \"\"\" CompileScript(self: General,script: str) -> List[PythonError] \"\"\" pass", "\"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def", "def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure):", "GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General)", "instance of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\"", "\"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass", "Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule):", "str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal)", "GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def 
GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter:", "-> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) ->", "SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\"", "-> (bool,LocationClassification) \"\"\" pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\"", "\"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass", "-> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\"", "General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum)", "pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations):", "of the class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" 
GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter:", "-> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) ->", "return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self:", "GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id:", "IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance", "def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self:", "\"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass def", "NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId:", "General,xml: str) -> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) ->", "(DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass", "Inbound,cacheKey: 
CacheKey,receiveLineId: str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId:", "Exception)Error(ex: BaseException) \"\"\" pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\"", "class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "Returns: The object returned by the method represented by the delegate. \"\"\" pass", "GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext]", "context: The System.Runtime.Serialization.StreamingContext object. Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet", "self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self:", "str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) ->", "of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass", "str) -> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User)", "def GetCount(self,*__args): \"\"\" GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self:", "RedispatchPrintJobWithPrinter(self: 
DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule", "Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): \"\"\" PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom:", "\"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\"", "pass @staticmethod def Fatal(*__args): \"\"\" Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def", "IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Mailer(object):", "GetWarehousesActive(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) ->", "(int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass", "def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts):", "NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\"", "BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def 
ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode:", "pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets):", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License \"\"\" # variables", "PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "to change. \"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an", "GetUsersAll(self: General) -> (int,Users) \"\"\" pass def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users)", "itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc #", "CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase)", "object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\"", "CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) ->", "-> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> 
DataFlowObject[Zone] \"\"\"", "def GetVersion(self): \"\"\" GetVersion(self: General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self:", "def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\" pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self:", "\"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp): \"\"\"", "UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs)", "pass def GetMacAddress(self): \"\"\" GetMacAddress(self: General) -> str \"\"\" pass def GetModule(self,arg,module): \"\"\"", "int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def", "\"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\"", "the current delegate. args: An array of objects that are the arguments to", "for a System.Data.DataSet. 
reader: The System.Xml.XmlReader instance that is passed during deserialization of", "GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass def GetVersion(self): \"\"\" GetVersion(self:", "ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self):", "General) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses)", "(int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass def", "SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass", "str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) ->", "DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def OpenBatchesForPacking(self,args,customers): \"\"\" OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers)", "pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts):", "None,lambda self: None) 
\"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None", "str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom:", "\"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg:", "property that is about to change. \"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader:", "def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\"", "Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\"", "-> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self):", "CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self:", "DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DiscardPrintLines(self,key): \"\"\" DiscardPrintLines(self: General,key: CacheKey)", "def 
CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): \"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str)", "\"\"\" pass class IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the", "-> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\"", "\"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) \"\"\" pass def", "General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self:", "-> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\"", "pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams):", "(int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users) \"\"\" pass def", "\"\"\" InitializeLifetimeService(self: Inventory) -> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup:", "Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: 
CacheKey,itemCode:", "CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting])", "Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex:", "\"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup:", "GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders:", "(int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass", "General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) ->", "pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General)", "ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval',", "Inventory,key: CacheKey,itemId: str) -> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey)", "\"\"\" AddTaskCacheBackgroundTasks(self: General) 
\"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def", "\"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass def", "General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning() instance=ZZZ()", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "\"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str", "def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines):", "\"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args):", "def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) \"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\"", "RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\"", "-> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None 
Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None", "pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\"", "GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) ->", "self: None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\" class Constants(object): # no doc", "def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\"", "ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] \"\"\" pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId:", "bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a", "\"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General)", "-> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\"", "None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License \"\"\" # variables with complex", "(bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass", "\"\"\" GetVendorsExpectedByFilter(self: Inbound,args: 
GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args:", "\"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound)", "Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) ->", "str) \"\"\" pass @staticmethod def Error(*__args): \"\"\" Error(msg: str)Error(ex: Exception)Error(ex: BaseException) \"\"\" pass", "\"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda", "str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass", "GetServerDate(self: General) -> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions)", "def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self:", "\"\"\" pass def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def __init__(self,*args): \"\"\"", "(int,ItemLocations) \"\"\" pass def UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass", "GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: 
OfflineScanning) -> AppVersions \"\"\"", "pass def GetMatchingPrintRules(self,attributes): \"\"\" GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self):", "RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def", "(OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass", "Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass def SyncStock(self): \"\"\" SyncStock(self: Inventory) \"\"\"", "\"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def", "(int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) -> (bool,Tag) \"\"\" pass", "pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages):", "Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrdersAll(self,salesOrders): \"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders)", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self:", "Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) ->", "str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def 
PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo:", "\"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self:", "GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) \"\"\" pass def GetBackgroundAgentsAll(self,agents): \"\"\" GetBackgroundAgentsAll(self:", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock)", "pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items):", "def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\"", "\"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self:", "\"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self):", "def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode):", "GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: 
GCloudProjectId(self: IApplicationSettings) -> str \"\"\"", "shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a", "\"\"\" GetCacheObject(self: General,hashCode: int) -> ICachable \"\"\" pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode:", "\"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass", "self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda", "MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases", "def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\"", "\"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used", "AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self:", "pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions):", "pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\"", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DeleteConfiguration(self,notificationSummaryId): \"\"\" DeleteConfiguration(self: NotificationSummary,notificationSummaryId: 
int)", "DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag]", "the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass def AddNotificationGroup(self,notificationGroup): \"\"\"", "the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddDirectOrder(self,args):", "DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None", "UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs])", "None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines:", "CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" 
CreateStorageAssignmentClassification(self:", "\"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "-> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\"", "DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool", "(int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def", "GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound)", "def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications):", "General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) ->", "ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self:", "class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "-> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ProcessPendingReceiveLines(self,dfObject): \"\"\" ProcessPendingReceiveLines(self: Inbound,dfObject: 
DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] \"\"\"", "SendBroadcastMessage(self: General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) ->", "str) -> object \"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) ->", "1.145 # no doc # no important from System.Collections.Generic import * from ..__init__", "-> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\"", "DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self:", "pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message):", "CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def", "XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def", "str) -> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs]", "Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) ->", "pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from", "(int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\" pass", "pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass def GetItemStockTotals(self,args,totals):", "DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers)", "-> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def", "Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed", "self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: 
IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda", "of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def", "\"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args:", "ErpLock) -> int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def", "IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) ->", "doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an", "def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\"", "(int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass", "-> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\"", "\"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer]", "of the current System.Object. 
\"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir:", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\"", "def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result):", "IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns", "def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\"", "GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) ->", "-> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments)", "GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) ->", "\"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key:", "Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: 
List[str]) ->", "def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch):", "General,endPoint: str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices)", "has been omitted from the payload. \"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing:", "-> DataFlowObject[CountGroup] \"\"\" pass def DeleteLicensePlateById(self,licensePlateId): \"\"\" DeleteLicensePlateById(self: Inventory,licensePlateId: int) \"\"\" pass def", "\"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) ->", "combine with this delegate. Returns: A delegate that is the new root of", "pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\" pass def", "pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): \"\"\" MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool \"\"\"", "pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license):", "delegate. 
args: An array of objects that are the arguments to pass to", "\"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) ->", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService:", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable \"\"\" Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "\"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def", "def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex:", "\"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args:", "DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\" PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs]", "DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) ->", "(bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> 
(int,BarcodeStructureDefinitions) \"\"\"", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,container):", "GetErpLocks(self,locks): \"\"\" GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General)", "InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg:", "def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\"", "pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg):", "DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self:", "pass def BeepContinuous(self,endPoint): \"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self:", "class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod", "SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module:", "None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda", "UnityServiceLocator,serviceType: Type) -> IEnumerable[object] 
\"\"\" pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str)", "def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\"", "pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg):", "DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self: Inventory,lp: LicensePlate) \"\"\" pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo):", "CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]])", "current System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def", "def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass def", "System.MulticastDelegate. 
\"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\"", "CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId:", "def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self:", "List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass", "\"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy:", "\"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) ->", "RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\"", "def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs)", "(int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: 
CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\"", "of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod", "GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General)", "GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self:", "FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\"", "\"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass", "\"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str \"\"\" pass", "\"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def", "Set: CurrentLicense(self: General)=value \"\"\" DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self:", "str \"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass def", "this System.MulticastDelegate that is equal to the specified delegate. 
value: The delegate to", "\"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages:", "instance of the class\"\"\" def RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self):", "None,lambda self: None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v:", "-> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]]", "str) -> (bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\"", "pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\"", "General) -> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\"", "\"\"\" pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def", "\"\"\" pass def GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def", "def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self:", "\"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent \"\"\" pass def", "def 
PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\"", "CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self:", "CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self:", "copy of the current System.Object. Returns: A shallow copy of the current System.Object.", "SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def", "\"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v:", "def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool \"\"\"", "pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def CreateUser(self,arg):", "int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\"", "pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def", "GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def 
HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs)", "GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): \"\"\" GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args:", "GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers)", "PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs)", "GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject:", "System.Object. \"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass", "def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\"", "The System.Runtime.Serialization.StreamingContext object. Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized", "to search for in the invocation list. 
Returns: If value is found in", "AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program", "PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "an instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) ->", "object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class", "-> OrderMatchesCustomerValidator \"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) ->", "class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\"", "\"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass", "a static method represented by the current System.MulticastDelegate. Returns: A static method represented", "str) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable", "event handlers that are attached to this component. 
\"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v:", "str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\"", "def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self:", "(bool,DeviceInformation) \"\"\" pass def GetDevicesAll(self,devices): \"\"\" GetDevicesAll(self: General) -> (int,Devices) \"\"\" pass def", "General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\"", "pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result):", "str) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance", "GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs)", "\"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) \"\"\" pass def", "CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone])", "-> (int,Warehouses) \"\"\" pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" 
GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\"", "pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass def GetRmaOrdersAll(self,rmaOrders):", "Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs]", "DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) ->", "None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v:", "def PickItemIdInBatch(self,dfObject): \"\"\" PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PickItemIdRangeInBatch(self,dfObject): \"\"\"", "PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args): \"\"\" PrintPickList(self:", "int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int)", "GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self:", "RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' 
SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|'", "str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order:", "\"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool \"\"\" pass def", "The name of the property that is about to change. \"\"\" pass def", "Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally", "of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo:", "Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs)", "calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc", "pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def 
GetPreReceiptLines(self,args,lines):", "-> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\"", "ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self:", "GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self:", "DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs)", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\"", "__new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v:", "-> (int,ItemLocations) \"\"\" pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items)", "\"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def", "\"\"\" Info(msg: str) \"\"\" pass @staticmethod def Trace(msg): \"\"\" Trace(msg: str) \"\"\" pass", "PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def 
PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs)", "utf-8 # module Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator", "\"\"\" pass def DeleteBatches(self,batchesToDelete): \"\"\" DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool \"\"\" pass def", "ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines):", "ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs)", "deserialization of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has", "Exception) -> RemotingException \"\"\" pass class ExtendedUnityServiceLocator(UnityServiceLocator): \"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self):", "-> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\"", "instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "an instance of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object]", "\"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: 
StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def", "GetBatchById(self,id,cacheKey,batch): \"\"\" GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self:", "str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass", "CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml:", "CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs])", "-> object \"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal)", "encoding: utf-8 # module Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by", "GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing)", "DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\"", "from the payload. 
\"\"\" pass def Dispose(self): \"\"\" Dispose(self: MarshalByValueComponent,disposing: bool) Releases the", "str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\" pass def", "self,v: None,lambda self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda", "\"\"\" Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Printing()", "General,id: int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) ->", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self): \"\"\"", "-> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass", "Returns: A shallow copy of the current System.Object. \"\"\" pass def SetCurrentAppVersion(self,args): \"\"\"", "bool \"\"\" pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\"", "CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self:", "\"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def", "the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] \"\"\" pass def", "-> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label): \"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool", "-> (int,Tags) \"\"\" pass def 
GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\"", "int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\"", "ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders)", "CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self):", "GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) ->", "(int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\"", "PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self:", "releases the managed resources. 
disposing: true to release both managed and unmanaged resources;", "def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\"", "def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\"", "StartMessageQueueListener(self,cancellationToken): \"\"\" StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task \"\"\" pass def UpdateMessage(self,message): \"\"\" UpdateMessage(self:", "DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None", "AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self:", "x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject):", "if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary format,false otherwise.", "class IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass def SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights:", "(bool,CacheKey,Batch) \"\"\" pass def GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass", "class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\"", "int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications)", "-> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self):", "General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\"", "GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey:", "Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int)", "signature \"\"\" pass def 
__repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\" pass UserName=property(lambda", "None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None", "\"\"\" pass @staticmethod def __new__(self,stockManager,messaging): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda", "-> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\"", "CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification])", "def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass def GetGeneratedScriptComment(self,script): \"\"\" GetGeneratedScriptComment(self:", "def ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg):", "ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg:", "pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg):", "GetItemExistsOnLocation(self: General,itemCode: 
str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self:", "str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self:", "def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for", "General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs)", "-> Array[str] \"\"\" pass def GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\"", "GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass def GetPreReceiptLines(self,args,lines): \"\"\" GetPreReceiptLines(self:", "pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args):", "LicensePlate) -> LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum)", "StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v:", "the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateErpLock(self,lock):", "General,question: str,possibleAnswers: int) -> Answers \"\"\" pass def 
SendKey(self,endPoint,key): \"\"\" SendKey(self: General,endPoint: str,key:", "\"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\"", "GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): \"\"\" GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self:", "General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime:", "LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def", "self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda", "Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes:", "PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self:", "pass def GetActiveColliPresets(self,colliPresets): \"\"\" GetActiveColliPresets(self: General) -> (int,ColliPresets) \"\"\" pass def GetAppDomainList(self): \"\"\"", "GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self:", 
"\"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) ->", "GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId:", "\"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass def AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock:", "for signature \"\"\" pass @staticmethod def __new__(self): \"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context:", "pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg):", "(int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\"", "for in the invocation list. 
Returns: If value is found in the invocation", "pass __all__=[ 'ConvertTo', ] class CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info:", "\"\"\"hardcoded/mock instance of the class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass", "an instance of the class\"\"\" def CreateContainer(self): \"\"\" CreateContainer(self: AppHost) -> UnityContainer \"\"\"", "Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) ->", "IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self:", "ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\"", "DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) ->", "ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject):", "pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def", "GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def 
GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode:", "CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass", "GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self:", "(bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\"", "AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs)", "GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str)", "GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self: General,name: str)", "HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) ->", "\"\"\" GetCountGroupsAll(self: Inventory) -> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int)", "int) -> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) ->", "General,filter: str) -> (int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: 
General,target: TagTarget) ->", "str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\"", "General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\"", "CreateShipperServiceLink(self,arg): \"\"\" CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self:", "-> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass", "DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey]", "GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def GetUsedPrintJobTypes(self): \"\"\" GetUsedPrintJobTypes(self: DocumentQueue) ->", "RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] \"\"\"", "MailgunApiKey(self: IApplicationSettings) -> str \"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) ->", "def 
PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): \"\"\" PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey \"\"\"", "object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\" def", "\"\"\" ExtendedUnityServiceLocator(container: IUnityContainer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator()", "def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\"", "Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams)", "DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str", "GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid)", "PrintRule) -> PrintRule \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed from a System.Data.DataSet. 
table:", "\"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\"", "\"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args): \"\"\"", "class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddPrintJob(self,args): \"\"\"", "\"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool \"\"\" pass", "def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int \"\"\" pass def CancelProcessCounts(self):", "-> (int,ItemStockAllocationList) \"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs)", "List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) \"\"\" pass", "CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations)", "NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationSummary()", "Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) \"\"\"", "GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] \"\"\" pass def 
GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs)", "-> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\"", "-> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass", "General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self: General,id: int) ->", "DataSet serialized in its binary format,false otherwise. \"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self:", "\"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes):", "General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber:", "class Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "-> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\"", "GetNumberRangeArgs) -> List[NumberRange] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NumberGeneration) -> object \"\"\"", "in its invocation list; otherwise,this instance with its original invocation list. 
\"\"\" pass", "ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification]", "pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) \"\"\" pass def GetWarehouseExists(self,warehouseCode):", "AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass", "int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass", "\"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey): \"\"\" DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\"", "self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str \"\"\" PdfPrintNetLicenseKey=property(lambda self: object(),lambda", "NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args:", "\"\"\" IsBosInboundListenerRunning(self: OfflineScanning) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "schema information has been omitted from the payload. \"\"\" pass def Dispose(self): \"\"\"", "NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "A shallow copy of the current System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self:", "\"\"\" pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass def", "System.Object. Returns: A shallow copy of the current System.Object. \"\"\" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir):", "pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool \"\"\" pass def", "GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs)", "pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\"", "PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid:", "General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing)", "Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) ->", "CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): \"\"\" CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey:", "StreamingContext) -> bool Inspects the format of the serialized representation of the DataSet.", "CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" 
CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\"", "\"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass", "def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\"", "NotificationCenter,groupKey: str,userId: int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self:", "GetVersion(self): \"\"\" GetVersion(self: General) -> str \"\"\" pass def GetWarehouseByCode(self,warehouseCode,warehouse): \"\"\" GetWarehouseByCode(self: General,warehouseCode:", "Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings", "\"\"\" pass def AddTaskNotificationSummaryTasks(self): \"\"\" AddTaskNotificationSummaryTasks(self: NotificationCenter) \"\"\" pass def DeleteNotification(self,notificationId): \"\"\" DeleteNotification(self:", "\"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> 
IEnumerable[OutboundOrder] \"\"\" pass", "current delegate.-or- null,if the method represented by the current delegate does not require", "RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\"", "\"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str", "GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object", "int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones)", "WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "None) class IApplicationSettings: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from the payload.", "pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the", "-> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) ->", "Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc # no important from System.Collections.Generic", "PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) ->", "def IsRetryPossible(ex,currentIdentity): \"\"\" 
IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod def WrapException(ex):", "ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\" GetCacheObject(self: General,hashCode: int)", "def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self:", "pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines):", "(int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self:", "GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs)", "class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def Clone(self): \"\"\"", "Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey:", "and unmanaged resources; false to release only unmanaged resources. 
\"\"\" pass def GetSchemaSerializable(self,*args):", "TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\"", "\"\"\" AddUserToZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self:", "Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId:", "HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) ->", "\"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): \"\"\" Inventory(stockManager:", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor):", "-> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\" pass", "\"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self):", "self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings:", "GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\"", "pass def UpdatePackageData(self,args,newPackageData,packages): 
\"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) \"\"\" pass def", "ModuleArgs) -> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str])", "\"\"\" pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass", "\"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def", "str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper)", "\"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) ->", "Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "\"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid \"\"\"", "\"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs)", "of the current System.Object. 
\"\"\" pass def ReissueMessage(self,messageId): \"\"\" ReissueMessage(self: Messaging,messageId: Guid) \"\"\"", "def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\" pass def GetPrintJobs(self,args,paging):", "def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self): \"\"\"", "pass def GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations):", "-> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\"", "GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" pass def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args:", "-> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass", "ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): \"\"\" ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass", "\"\"\" GetItem(self: General,itemCode: str) -> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode:", "\"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args:", 
"LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass", "-> str \"\"\" pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations)", "def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines):", "BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles)", "-> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def", "instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) ->", "GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintJobTypesOfConfiguredPrintRules(self): \"\"\" GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue)", "GetAppVersionFileSpec(self,args): \"\"\" GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self:", "\"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def", "(int,Batches) \"\"\" pass def 
GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass", "\"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass", "PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch)", "CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs]", "\"\"\" pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): \"\"\" GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass", "PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey \"\"\" pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self:", "bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers)", "def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\"", "InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "\"\"\" 
InitializeLifetimeService(self: Messaging) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "no doc # no important from System.Collections.Generic import * from ..__init__ import *", "System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass", "GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info:", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda", "\"\"\" CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate \"\"\" pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry:", "General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass", "DataFlowObject[LocationClassification] \"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue:", "-> (int,List[ErpLock]) \"\"\" pass def GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\" pass", "Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning)", "System.Data.DataSet. 
reader: The System.Xml.XmlReader instance that is passed during deserialization of the System.Data.DataSet.", "-> DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def", "DocumentQueue) -> List[PrintJobType] \"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine]", "-> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass", "bool \"\"\" pass def CheckHookVersions(self): \"\"\" CheckHookVersions(self: General) -> bool \"\"\" pass def", "pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self):", "\"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey:", "bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None): \"\"\"", "self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda", "DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange]", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\"", "CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject): \"\"\" CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders])", "be routed to the remote server object. Returns: A shallow copy of the", "\"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self: Outbound,batch: Batch) ->", "self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable \"\"\" SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda", "int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\" pass def StopDiscoveryServer(self,unsafe=None):", "GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\" pass def GetTagById(self,id,tag): \"\"\" GetTagById(self:", "def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\" pass def GetUsersActive(self,users): \"\"\"", "bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult", "RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule)", "DeleteLocationClassification(self: General,arg: 
DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs)", "def GetScanners(self): \"\"\" GetScanners(self: OfflineScanning) -> Scanners \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass", "\"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def", "GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method represented by the", "-> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool", "of the current System.Object. Returns: A shallow copy of the current System.Object. 
\"\"\"", "def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\"", "GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) \"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args:", "-> (int,FindableList[MobileService]) \"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\"", "pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg):", "the class\"\"\" def AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\" pass def", "def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location):", "\"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink", "LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations)", "def GetAdhocRmaCustomersByFilter(self,args,customers): \"\"\" GetAdhocRmaCustomersByFilter(self: Inbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): 
\"\"\"", "\"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the specified System.Data.DataSet property is", "\"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General)", "-> (int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\"", "General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] \"\"\" pass def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object)", "PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject): \"\"\" PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs]", "SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] \"\"\"", "pass def GetMessageHandlers(self,args,messageHandlers): \"\"\" GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers):", "type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self:", "CallerContext(object): \"\"\" CallerContext() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return CallerContext()", "\"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId:", "\"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> 
DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject:", "def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) \"\"\" pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\"", "pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def", "GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self:", "\"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General) ->", "ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\"", "Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\" InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator \"\"\"", "\"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None)", "BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings:", "GetWarehousesAll(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: 
General) -> (int,Warehouses)", "None) class Mailer(object): \"\"\" Mailer() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "the current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange]", "bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\"", "-> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\"", "(int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers): \"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass", "\"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\"", "def FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\" pass def", "-> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) ->", "the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls", "an element from the invocation list of this System.MulticastDelegate that is equal to", "\"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self:", "CacheKey,line: 
PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) ->", "def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self:", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "\"\"\" pass def SaveShipperSetting(self,shipperId,memberName,value): \"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def", "DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self:", "def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink()", "\"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject:", "ExecuteMessageHandlerResult \"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass", "identity to its clone,which will cause remoting client calls to be routed to", "\"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type:", "CheckHookVersions(self: General) -> bool \"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) ->", "x; see x.__class__.__doc__ for signature \"\"\" pass BosRestBaseUri=property(lambda self: 
object(),lambda self,v: None,lambda self:", "User) -> (int,Zones) \"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\"", "def SendMessage(self,endPoint,message): \"\"\" SendMessage(self: General,endPoint: str,message: str) \"\"\" pass def SendMouseClick(self,endPoint,x,y): \"\"\" SendMouseClick(self:", "pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General)", "pass def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result):", "false to delete the current System.MarshalByRefObject object's identity,which will cause the object to", "int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str)", "pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId):", "Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) ->", "pass def IsNumberUsed(self,args): \"\"\" IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool \"\"\" pass def MemberwiseClone(self,*args):", "\"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\"", "self,v: None,lambda self: None) class Mailer(object): \"\"\" Mailer() \"\"\" def 
ZZZ(self): \"\"\"hardcoded/mock instance", "type,object: object,method: IntPtr) \"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging:", "SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\"", "pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda", "pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\"", "-> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\"", "of the class\"\"\" def AddScanner(self,args): \"\"\" AddScanner(self: OfflineScanning,args: AddScannerArgs) \"\"\" pass def BosInboundListenerPullDirect(self):", "General) \"\"\" pass class PyLogger(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of", "ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists:", "str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str", "GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self:", "\"\"\" pass def SetCurrentAppVersion(self,args): \"\"\" SetCurrentAppVersion(self: OfflineScanning,args: 
SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\"", "\"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode:", "int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General) -> (int,ScriptTasks) \"\"\"", "@staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class", "the class\"\"\" def ChangeMessagesStatus(self,messageIds,newStatus): \"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus):", "Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) ->", "-> (int,CountGroups) \"\"\" pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\"", "pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def SaveBatch(self,batch): \"\"\" SaveBatch(self:", "pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass", "instance of the class\"\"\" return IExtendedServiceLocator() 
instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args):", "def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\" pass def GetUserCacheData(self,tag): \"\"\"", "pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders):", "GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass def GetZoneByName(self,name,zone): \"\"\" GetZoneByName(self:", "PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label):", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): \"\"\" __new__(cls:", "GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\"", "ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str", "None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings: # no doc", "\"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None 
PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None", "InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext)", "ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self:", "General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) ->", "RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "-> (DataFlowObject[PurchaseOrders],InboundReceiveLines) \"\"\" pass def GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\"", "\"\"\" GetMacAddress(self: General) -> str \"\"\" pass def GetModule(self,arg,module): \"\"\" GetModule(self: General,arg: ModuleArgs)", "IStockManager,messaging: Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Outbound() instance=ZZZ()", "-> str \"\"\" class Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of", "def CheckBatchScan(self,args): \"\"\" CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult \"\"\" pass def CloseBatchesForPacking(self,args): \"\"\"", "pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def 
CreateDatabase(self,message):", "\"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches):", "MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program", "-> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\"", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self:", "\"\"\" ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) \"\"\" pass def ChangeMessageStatus(self,messageId,newStatus): \"\"\" ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus:", "\"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return AppHost() instance=ZZZ()", "TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial',", "@staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\"", "str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification): 
\"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass", "General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting)", "\"\"\" GetPendingPrintLineCount(self: General,key: CacheKey) -> int \"\"\" pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName:", "GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def GetLicensePlateItems(self,args,pagingParams,items): \"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams:", "invokes (late-bound) the method represented by the current delegate. args: An array of", "GetDirectOrderHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] \"\"\" pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\"", "GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass", "(late-bound) the method represented by the current delegate. 
args: An array of objects", "instance of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "str,culture: str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) ->", "def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\"", "LicensePlate \"\"\" pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): \"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool", "def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\"", "GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems)", "\"\"\" PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self:", "def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self):", "pass def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\" pass def GetCountGroupsByType(self,type):", "Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\"", "def 
GetItemsOnLocationLeftToAddToLp(self,args): \"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\"", "PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of", "pass def GetCacheObjectAsXml(self,hashCode): \"\"\" GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass def GetChacheStatus(self):", "Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key:", "pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg):", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an", "str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId:", "Sleep(self: General,seconds: int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber:", "DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification]", "from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc # no important from", "\"\"\" pass class NumberGeneration(MarshalByRefObject): \"\"\" NumberGeneration() \"\"\" def 
ZZZ(self): \"\"\"hardcoded/mock instance of the", "'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 'StartupSqlConnRetryAttempts', 'SupportedImages',", "str) -> str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object", "InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult \"\"\" pass def ReceiveItemIdMulti(self,dfObject): \"\"\" ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs])", "pass def CancelProcessCounts(self): \"\"\" CancelProcessCounts(self: Inventory) \"\"\" pass def ChangeDefaultLocationAfterTransfer(self,arg): \"\"\" ChangeDefaultLocationAfterTransfer(self: Inventory,arg:", "str) -> Array[Byte] \"\"\" pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte]", "General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self:", "def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\"", "(bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass", "CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count]", "(bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass", 
"GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) ->", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self:", "FormatActivationExceptionMessage(self,*args): \"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\" pass def IsRegistered(self,type=None):", "delegate to combine with this delegate. Returns: A delegate that is the new", "object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the", "pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def DeleteCountFromCacheAndTable(self,cacheKey):", "ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg:", "-> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\" GetAllExecutionSchedules(self: NotificationSummary) -> List[str] \"\"\" pass", "str) -> str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate", "None) \"\"\"Get: GCloudProjectId(self: IApplicationSettings) -> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId:", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an", 
"\"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args:", "shallow copy of the current System.Object. \"\"\" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo:", "@staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda", "the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a", "GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args:", "is about to change. \"\"\" pass def ReadXmlSerializable(self,*args): \"\"\" ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\"", "System.Data.DataTable being removed. \"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a", "def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\"", "pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\"", "def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self:", "current delegate. 
args: An array of objects that are the arguments to pass", "\"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset):", "int) -> BlobContent \"\"\" pass def GetFileTypes(self): \"\"\" GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] \"\"\"", "its binary format,false otherwise. \"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises", "PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General)", "General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\"", "pass def GetSortedItemLocations(self,args,filterOptions,locations): \"\"\" GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) \"\"\" pass @staticmethod", "the current System.MulticastDelegate. Returns: A static method represented by the current System.MulticastDelegate. 
\"\"\"", "GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user:", "def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args):", "ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None", "pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) \"\"\" pass", "CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent])", "pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] \"\"\" pass def PrintPickList(self,args):", "DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) ->", "\"\"\" GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" 
pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings()", "-> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): \"\"\" GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) \"\"\"", "DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass", "self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License \"\"\" # variables with", "\"\"\" pass def CreateScriptTask(self,arg): \"\"\" CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def", "str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte]", "GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self:", "pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item):", "pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders): \"\"\"", "Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self:", "return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass", "GetLibContent(self: General,arg: GetLibArgs) 
-> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() ->", "for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def", "GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress)", "def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\"", "-> (bool,TransportPackages) \"\"\" pass def UpdateReference(self,reference,cacheKey): \"\"\" UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool", "x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\"", "for signature \"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self:", "GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inbound) ->", "for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self):", "(bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass", "ProcessSalesOrderQueued(self,args,order): \"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\"", "\"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def", "def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity \"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self:", "def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args): \"\"\" CheckLicensePlateIntegrity(self: Inventory,args:", "ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y]", "GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) ->", "DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def", "Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId:", "\"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: 
OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\"", "class\"\"\" def Clone(self): \"\"\" Clone(self: DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\"", "def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): \"\"\" GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList):", "General,script: ZoneScript) -> str \"\"\" pass def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities", "bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance", "\"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def", "-> str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) -> AppVersions \"\"\" pass", "-> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\"", "GetReplenishmentOrderLines(self,args,replenishmentOrderLines): \"\"\" GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self:", "CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" 
GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines)", "-> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool", "pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) \"\"\" pass", "no important from System.Collections.Generic import * from ..__init__ import * # no functions", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "\"\"\" SendMouseClick(self: General,endPoint: str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key:", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda", "ReadXmlSerializable(self: DataSet,reader: XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\"", "the managed resources. 
disposing: true to release both managed and unmanaged resources; false", "Task[Stream] \"\"\" pass def EditRemotePublisher(self,req): \"\"\" EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher \"\"\" pass", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable \"\"\" Tables=property(lambda self:", "DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass", "pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) \"\"\" pass def", "CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def", "Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] \"\"\" pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager:", "DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\"", "GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: 
General,warehouseLocation: str,warehouseLayoutSetting:", "(bool,WarehouseLayout) \"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass", "self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda", "\"\"\" FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str \"\"\" pass def IsRegistered(self,type=None): \"\"\"", "CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists:", "GetItemsAll(self,args,items): \"\"\" GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): \"\"\" GetItemsOnDefaultInboundLocation(self:", "the invocation list of this System.MulticastDelegate that is equal to the specified delegate.", "\"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs)", "pass def DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self:", "HasNotifications(self,filterOn): \"\"\" HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self:", "RemoveImpl(self,*args): \"\"\" RemoveImpl(self: 
MulticastDelegate,value: Delegate) -> Delegate Removes an element from the invocation", "DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass", "GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass", "pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\"", "UpdateMessage(self: Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "(bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations): \"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass", "def GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\"", "GetCacheObjectAsXml(self: General,hashCode: int) -> str \"\"\" pass def GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) ->", "General,arg: ModuleArgs) -> bool \"\"\" pass def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) ->", "None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general:", "(int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" 
GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass", "of the class\"\"\" return Inventory() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "\"\"\" __new__(cls: type,general: General) \"\"\" pass class PyLogger(object): # no doc def ZZZ(self):", "RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self:", "\"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self):", "-> (int,LocationClassifications) \"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\"", "ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\" ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey:", "PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" PrintRmaReceipt(self:", "of the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod", "pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers):", "\"\"\" pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args):", "-> (int,Items) \"\"\" pass def 
GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\"", "pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines):", "pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def", "\"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args:", "GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams)", "\"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch:", "bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def", "pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages):", "-> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
reader: The System.Xml.XmlReader instance that", "(int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass", "Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\"", "(bool,LocationItem) \"\"\" pass def GetItemIdentificationExists(self,itemCode,itemId): \"\"\" GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\"", "-> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) ->", "XmlReader) \"\"\" pass def ShouldSerializeRelations(self,*args): \"\"\" ShouldSerializeRelations(self: DataSet) -> bool \"\"\" pass def", "pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items):", "# no important from System.Collections.Generic import * from ..__init__ import * # no", "Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in", "pass def SaveModule(self,module): \"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings):", "\"\"\" GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) \"\"\" pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): \"\"\" GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs)", "\"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def", "pass class ExceptionHelper(object): \"\"\" ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self:", "def GetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\"", "ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) ->", "\"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self:", "\"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\" ValidateBatchLocation(self: Outbound,cacheKey: 
CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\"", "\"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def", "\"\"\" GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages \"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self:", "\"\"\" pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): \"\"\" PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey \"\"\"", "def GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self:", "pass def DeleteLocationClassification(self,arg): \"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg):", "def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): \"\"\" PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) \"\"\" pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\"", "GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self:", "pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def GenerateSerialNumbers(self,dfObject,numbersGenerated):", "GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: 
int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self:", "\"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass @staticmethod", "DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. reader: The", "General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\"", "# no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return PyLogger() instance=ZZZ()", "pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def", "(int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass def GetMessages(self,args,paging,messages): \"\"\" GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\"", "ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\"", "GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def HandleColliForStockRegistration(self,transportPackages): \"\"\" HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages)", "(int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) \"\"\" pass", "Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) \"\"\" pass def PrintRmaReceipt(self,groupGuid): \"\"\" 
PrintRmaReceipt(self: Inbound,groupGuid: Guid) ->", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings:", "pass def GetShippers(self,shippers): \"\"\" GetShippers(self: Outbound) -> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\"", "GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from the binary or XML", "GetScripts(self,arg,scripts): \"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self:", "the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def", "context: The streaming context. \"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) ->", "# no functions # classes class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) \"\"\" pass def", "CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self:", "-> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\"", "\"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\" pass def", "pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: 
Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\" pass def GetCustomersWithPendingPackages(self,args,customers):", "\"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "\"\"\" pass def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def", "by the current System.MulticastDelegate. \"\"\" pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): \"\"\" Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str)", "DeleteBatchIfNothingChanged(self,batchCacheKey): \"\"\" DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) \"\"\" pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey])", "\"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self:", "PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun:", "\"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def", "str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject", "(int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass", "def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> 
PagedList[LogLine] \"\"\" pass def GetMacAddress(self): \"\"\"", "\"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound)", "Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda", "passed during deserialization of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema", "List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass", "def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\"", "GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Messaging)", "def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item):", "\"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def AuthenticateUser(self,args,barcodeSettings): \"\"\" AuthenticateUser(self: General,args: AuthenticationArgs) ->", "GetErpSettings(self): \"\"\" GetErpSettings(self: General) -> SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General)", "pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\"", "RemoveDirectOrder(self: Outbound,args: 
DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool]", "OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\" UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool)", "(bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\"", "IApplicationSettings) -> str \"\"\" BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestLicenseCreationSecret(self:", "\"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User)", "\"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass def", "DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] \"\"\" pass def DeleteShipperServiceLink(self,arg): \"\"\" DeleteShipperServiceLink(self:", "pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject):", "\"\"\"hardcoded/returns an instance of the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) ->", "RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\"", "from a System.Data.DataTable. relation: The System.Data.DataRelation being removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\"", "CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License \"\"\"", "\"\"\" StopProfiler(self: General) \"\"\" pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): \"\"\" TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) ->", "-> (int,Warehouses) \"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass", "str,x: int,y: int) \"\"\" pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): \"\"\" SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase)", "GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification)", "General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) ->", "Outbound,args: PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) ->", "\"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\" pass class PyLogger(object):", "GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General)", "GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> 
(bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self:", "\"\"\" GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key:", "\"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type):", "\"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def", "CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantity(self,key,args): \"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args:", "pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg):", "Messaging,message: IMessage) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for", "CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool \"\"\" pass def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\"", "\"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Inventory)", "GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\" GetOutboundOrderLinesBatchableByCustomers(self:", "IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return RemotePublishing() instance=ZZZ()", "-> (int,List[Attachment]) \"\"\" pass def GetHistoryOutboundOrderCustomers(self,args,customers): \"\"\" GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) \"\"\"", "DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) \"\"\" pass def CheckLicensePlateIntegrity(self,args):", "GetPackages(self,key,packages): \"\"\" GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self:", "def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode):", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def DoGetAllInstances(self,*args): \"\"\" 
DoGetAllInstances(self: UnityServiceLocator,serviceType: Type)", "CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: General) -> License Set:", "\"\"\" pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass class", "\"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass def", "the class\"\"\" def GetPickListsAll(self,pickLists): \"\"\" GetPickListsAll(self: Printing) -> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames):", "\"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool)", "GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] \"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self:", "doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IExtendedServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an", "\"\"\" CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass", "Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId): \"\"\" GetHistoryShipment(self: Outbound,shipment: HistoryShipment)", "pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\"", "with during deserialization in remoting scenarios. 
System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in", "MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\"", "int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General)", "instance of the class\"\"\" def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type:", "Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None", "\"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass def GetSettingsTable(self):", "see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod", "int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\"", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Tables(self: DataSet) -> DataTableCollection \"\"\" PurchaseOrders_GetHistoryLinesDataTable=None", "ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self:", "\"\"\" pass def GetZoneById(self,id,zone): \"\"\" GetZoneById(self: General,id: int) -> (bool,Zone) \"\"\" pass def", "\"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self:", "pass def CreateReplenishmentOrderLines(self,lines): \"\"\" CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def CreateReplenishmentOrders(self,dfObject):", "GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs)", "self: None) \"\"\"Get: CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda", "(int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\"", "\"\"\" pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): \"\"\" 
GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) \"\"\" pass def GetWarehousesAll(self,warehouses):", "'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername',", "\"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line:", "equal to the specified delegate. value: The delegate to search for in the", "CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\"", "\"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId): \"\"\" GetBlobContent(self: DocumentQueue,blobId: int) ->", "ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self:", "NotificationCenter) -> List[NotificationGroup] \"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification]", "-> DataFlowObject[CacheKey] \"\"\" pass def VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\"", "def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self:", "-> (int,ReportItems) \"\"\" pass def GetPickListsForSettings(self,pickListNames): \"\"\" GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass", "-> bool \"\"\" pass def 
ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\"", "\"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] \"\"\" pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): \"\"\"", "object \"\"\" pass def IsProfilerRunning(self): \"\"\" IsProfilerRunning(self: General) -> bool \"\"\" pass def", "General,key: CacheKey) \"\"\" pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\"", "GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int)", "GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str \"\"\" pass def GetAppVersions(self): \"\"\" GetAppVersions(self: OfflineScanning) ->", "class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\"", "DataFlowObject[List[int]] \"\"\" pass def DeletePreReceipt(self,dfObject): \"\"\" DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] \"\"\" pass", "Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult)", "pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool \"\"\" pass def UploadFile(self,name,file,overwrite): \"\"\"", "def __repr__(self,*args): \"\"\" __repr__(self: object) -> str \"\"\" pass UserName=property(lambda self: object(),lambda self,v:", "(int,List[ScriptSnippet]) \"\"\" pass def 
GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass", "invocation list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self: General,itemCode:", "\"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args:", "DataFlowObject[PickArgs] \"\"\" pass @staticmethod def RemoveBatch(batch): \"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args):", "def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of", "arguments. Returns: The object returned by the method represented by the delegate. 
\"\"\"", "GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\" GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) \"\"\"", "\"\"\" pass def UpdatePrintLine(self,key,line): \"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool \"\"\" pass", "-> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) \"\"\"", "doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an", "ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def IsRetryPossible(ex,currentIdentity): \"\"\" IsRetryPossible(ex:", "\"\"\" GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) \"\"\" pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active:", "pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def", "GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode:", "-> (int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object \"\"\" pass", "pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches):", "\"\"\" GetCountGroupsById(self: Inventory,id: int) -> 
CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type:", "-> (int,Customers) \"\"\" pass def GetCustomersPendingByFilter(self,customers,args): \"\"\" GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) \"\"\"", "Decimal,overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs)", "def GetErpName(self): \"\"\" GetErpName(self: General) -> str \"\"\" pass def GetErpSettings(self): \"\"\" GetErpSettings(self:", "(int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass", "StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue() instance=ZZZ()", "int) -> (int,Users) \"\"\" pass def GetVersion(self): \"\"\" GetVersion(self: General) -> str \"\"\"", "DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def UpdateDirectOrderLine(self,args): \"\"\" UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine]", "def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\"", "\"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\"", "-> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\"", "bool \"\"\" pass def 
CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message):", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an", "the class\"\"\" return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object):", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self:", "None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound)", "-> DataFlowObject[Count] \"\"\" pass def DeleteCountGroup(self,arg): \"\"\" DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\"", "def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass", "\"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) \"\"\" pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification):", "CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId:", "str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def 
PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode:", "None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings: #", "PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self:", "GetBatchByScan(self,barcode,batch): \"\"\" GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self:", "UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey:", "-> (int,Customers) \"\"\" pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\"", "-> (bool,LicensePlate) \"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\"", "(int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass", "def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self:", "@staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: 
IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object):", "\"\"\" pass def AddTaskNotificationCleanupTask(self): \"\"\" AddTaskNotificationCleanupTask(self: General) \"\"\" pass def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self:", "GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\"", "GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\" pass def GetHistoryOutboundOrderLines(self,args,orderLines): \"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines)", "AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self:", "General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\" pass def GetItemImageFromErp(self,itemCode): \"\"\" GetItemImageFromErp(self: General,itemCode: str)", "(x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None", "GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def 
GetHistoryTransportPackages(self,shipmentId,packages): \"\"\" GetHistoryTransportPackages(self:", "Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) ->", "str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass", "the arguments to pass to the method represented by the current delegate.-or- null,if", "-> str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Options(self: IApplicationSettings)", "GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\" GetCurrentIdentity(self: General) -> RemotingIdentity", "DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream]", "\"\"\" ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self:", "DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs) -> bool", "SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId:", "pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args):", "the class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def Clone(self):", 
"GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\"", "(int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines)", "\"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer)", "return NumberGeneration() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self:", "pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\" AddTaskMessageQueueCleanupTask(self: General)", "\"\"\" GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType)", "-> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg): \"\"\" CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\"", "GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str)", "def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool \"\"\" pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode):", "\"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" 
GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries)", "pass def GetDirectOrderLineDetailsByLinePk(self,linePk): \"\"\" GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] \"\"\" pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams):", "def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureDefinitionById(self,countId,definition): \"\"\" GetBarcodeStructureDefinitionById(self:", "-> DataFlowObject[ReplenishmentOrder] \"\"\" pass def CreateReplenishmentOrderLine(self,line): \"\"\" CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] \"\"\"", "Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container:", "to form a new delegate. 
follow: The delegate to combine with this delegate.", "\"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass", "pass def AddDirectOrderLine(self,args): \"\"\" AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddDirectOrderLineItemIdentification(self,args):", "-> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\"", "pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\"", "def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def", "\"\"\" pass def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance", "PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs]", "pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: 
General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries):", "GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) \"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey:", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General)", "GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass def GetVersion(self): \"\"\" GetVersion(self: General) ->", "pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Mailer(object): \"\"\" Mailer() \"\"\"", "signature \"\"\" pass class OfflineScanning(MarshalByRefObject): \"\"\" OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" def ZZZ(self):", "def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey)", "def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info: SerializationInfo,context:", "object. 
MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object.", "-> str \"\"\" pass def RemoveImpl(self,*args): \"\"\" RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes", "(int,Locations) \"\"\" pass def GetLogLines(self,args): \"\"\" GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] \"\"\" pass", "\"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass", "\"\"\" CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def CreatePreReceiptLines(self,dfObject): \"\"\" CreatePreReceiptLines(self: Inbound,dfObject:", "General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass def DeleteBackgroundAgent(self,arg): \"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) ->", "\"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\"", "pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment \"\"\" pass def GetHistoryTransportPackages(self,shipmentId,packages):", "def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems \"\"\" pass def InitializeLifetimeService(self): \"\"\"", "\"\"\" CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult \"\"\" pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg:", "DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a 
System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is", "pass def CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self:", "def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self: NotificationCenter)", "DisposeTransportPackagesWhenUnchanged(self,dfObject): \"\"\" DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self:", "pass def GetAllConfigurations(self): \"\"\" GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] \"\"\" pass def GetAllExecutionSchedules(self): \"\"\"", "instance. context: The streaming context. 
\"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet)", "bool \"\"\" pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): \"\"\" ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str)", "AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists):", "LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject:", "RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs)", "OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult \"\"\" pass def CombineImpl(self,*args): \"\"\" CombineImpl(self: MulticastDelegate,follow:", "(int,AllocationProfiles) \"\"\" pass def GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass", "DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream]", "def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool 
\"\"\" pass", "-> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) \"\"\" pass", "\"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args:", "\"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object \"\"\" pass def", "AppHost() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns", "def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self:", "-> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure)", "-> Task[Stream] \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: OfflineScanning) \"\"\" pass def GetAppVersionFileSpec(self,args):", "unmanaged resources. 
\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\" GetSchemaSerializable(self: DataSet) -> XmlSchema \"\"\" pass", "pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the specified", "KillAppDomain(self: General,filter: str) \"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def", "GetCustomers(self,args,customers): \"\"\" GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self:", "-> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): \"\"\" GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\"", "str \"\"\" pass def IsRegistered(self,type=None): \"\"\" IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type)", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance", "HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsById(self,shipmentId): \"\"\" GetHistoryShipmentsById(self: Outbound,shipmentId: int) ->", "\"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def", "(int,SalesOrderLines) \"\"\" pass def GetSalesOrderLines(self,args,salesOrderLines): \"\"\" GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) \"\"\" pass", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass def 
GetMessageHandlers(self,args,messageHandlers): \"\"\"", "Sends a notification that the specified System.Data.DataSet property is about to change. name:", "GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet)", "GetUsersInactive(self,users): \"\"\" GetUsersInactive(self: General) -> (int,Users) \"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId:", "Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key:", "\"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self:", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationSummary() instance=ZZZ() \"\"\"hardcoded/returns an instance", "GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\" pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): \"\"\" GetBarcodeStructure(self: General,value:", "LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files", "def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass def RemoveDirectOrderLine(self,args): \"\"\" 
RemoveDirectOrderLine(self: Outbound,args:", "copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client", "General,warehouseCode: str,filter: str) -> (int,LocationItems) \"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs)", "PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue() instance=ZZZ() \"\"\"hardcoded/returns", "GetDeviceByMacAddress(self,macAddress,device): \"\"\" GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) \"\"\" pass def GetDeviceByName(self,name,device): \"\"\" GetDeviceByName(self:", "General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) ->", "ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode: str,itemId:", "CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass", "def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): \"\"\" ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) \"\"\" pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject:", "no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return BusinessLayerExtensions() instance=ZZZ() \"\"\"hardcoded/returns", "the current System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExtendedUnityServiceLocator() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "-> Guid \"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\"", "bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\" PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\"", "Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str)", "GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName:", "def ValidateColliReferences(self,dfObject): \"\"\" ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\"", "Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation:", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,implementationContainer=None): \"\"\" __new__(cls: type,implementationContainer:", "EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) ->", "\"\"\"Get: GCloudPubSubPrefix(self: 
IApplicationSettings) -> str \"\"\" MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None)", "(int,LocationItems) \"\"\" pass def GetItemsOnTransportLocation(self,filter,items): \"\"\" GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) \"\"\" pass", "no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem() instance=ZZZ() \"\"\"hardcoded/returns", "System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging):", "with during deserialization in remoting scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema", "Task[Guid] \"\"\" pass def AddPrintJobScriptOverride(self,args,blobId,blobName): \"\"\" AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid", "VoidShipment(self,shipment): \"\"\" VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "-> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "\"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "def DeletePrintJobs(self,jobIds): \"\"\" DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId:", "object. 
Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) ->", "\"\"\" CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\"", "pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General)", "\"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks):", "Constants(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Constants()", "\"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet.", "\"\"\" pass def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def", "\"\"\" GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) \"\"\" pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id:", "GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self:", "EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self: RemotePublishing) -> Publishers", "bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\" AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool", "x.__class__.__doc__ for signature \"\"\" 
pass class IExtendedServiceLocator: # no doc def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def", "StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType',", "pass def GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\"", "\"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def GetOutboundOrders(self,args,orders):", "int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a", "\"\"\" GetWarehouseExists(self: General,warehouseCode: str) -> bool \"\"\" pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): \"\"\" GetWarehouseLayoutBySetting(self: General,warehouseLocation:", "x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class Inbound(MarshalByRefObject):", "List[str],overwriteIfExists: bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists:", "class\"\"\" return Constants() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch'", "\"\"\" pass def GetBackgroundAgentStatusByType(self,type): \"\"\" GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def", "(int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass", "GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey)", "SetZoneRightsOfZone(self,zoneId,zoneRights): \"\"\" SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool \"\"\" pass def Sleep(self,seconds): \"\"\"", "pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def DeletePrintJobs(self,jobIds):", "CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateZeroCountByCountGroup(self,countGroupId): \"\"\" CreateZeroCountByCountGroup(self:", "-> DataFlowObject[User] \"\"\" pass def DeleteWarehouseLayoutSetting(self,arg): \"\"\" DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] \"\"\"", "str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def 
PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom:", "\"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self:", "copy of the current System.Object. \"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs])", "the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def CreateContainer(self):", "def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" pass class ExceptionHelper(object): \"\"\"", "\"\"\" pass def GetScreenshot(self,accessId): \"\"\" GetScreenshot(self: General,accessId: str) -> Array[Byte] \"\"\" pass def", "ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] \"\"\" pass def RemoveCountItemIdentification(self,key,itemId): \"\"\" RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId:", "(int,Items) \"\"\" pass def GetItemVendors(self,args,vendors): \"\"\" GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) \"\"\" pass", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str)", 
"PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self:", "\"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml:", "PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintPrintLines(self,key,label):", "pass def GetZonesActive(self,active,zones): \"\"\" GetZonesActive(self: General,active: bool) -> (int,Zones) \"\"\" pass def GetZonesActiveOfCurrentUser(self,zones):", "the class\"\"\" def AddRemotePublisher(self,req): \"\"\" AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def", "\"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self: General)", "class NotificationCenter(MarshalByRefObject): \"\"\" NotificationCenter(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType]", "Clone(self: DataSet) -> DataSet \"\"\" pass def DetermineSchemaSerializationMode(self,*args): \"\"\" DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext)", "def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\"", "pass def GenerateNumbers(self,dfObject): \"\"\" 
GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId):", "def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args):", "\"\"\" pass def CheckLicenseFile(self,xml,errors,license): \"\"\" CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) \"\"\" pass def", "pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons):", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls:", "for this instance,then a new System.Delegate without value in its invocation list; otherwise,this", "System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with", "are attached to this component. 
\"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None)", "DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg): \"\"\" CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass", "\"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\" pass def GetScanners(self):", "CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count)", "\"\"\" pass def GetZonesActiveOfCurrentUser(self,zones): \"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones):", "System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause", "pass def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\"", "RemotePublishing) -> Publishers \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: RemotePublishing) -> object \"\"\"", "StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass def StartProfiler(self): \"\"\" StartProfiler(self: General) \"\"\"", "\"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self: General,filter: str) -> (int,Tags) \"\"\" pass def", "\"\"\" pass def DisposeReceiptWhenUnchanged(self,dfObject): \"\"\" DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def", "\"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" 
GetInboundReceiveLinesByKey(self: Inbound,cacheKey:", "TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\" UpdateLicensePlate(self:", "-> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass", "DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass def", "-> bool \"\"\" pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\"", "MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): \"\"\" MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "\"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\"", "GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) ->", "(int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self: General) -> SystemSettings \"\"\" pass def", "of the current System.Object. \"\"\" pass def ResetNumberRange(self,dfObject): \"\"\" ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) ->", "\"\"\" pass def GetLicensePlateByCode(self,args,licensePlate): \"\"\" GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) \"\"\" pass def", "System.Data.DataSet. table: The System.Data.DataTable being removed. 
\"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name:", "pass def GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user):", "def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg): \"\"\"", "\"\"\" pass def GetShipperSettingsTableById(self,shipperId): \"\"\" GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable \"\"\" pass def", "\"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "-> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\"", "IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of", "def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) \"\"\" pass def GetScreenshot(self,accessId):", "def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): \"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order):", "BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId: str) \"\"\"", "def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: 
bool) -> bool \"\"\" pass def", "str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo:", "RemotePublishing,req: AddRemotePublisherArgs) -> Publisher \"\"\" pass def DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\"", "Delegate) -> Delegate Combines this System.Delegate with the specified System.Delegate to form a", "clone,which will cause remoting client calls to be routed to the remote server", "RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License)", "(RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def", "the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of", "of the current System.Object. \"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\"", "invocation list. \"\"\" pass def DynamicInvokeImpl(self,*args): \"\"\" DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically", "VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x;", "str) -> UiForm \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\"", "\"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines):", "NotificationSummary,notificationSummaryId: int) \"\"\" pass def DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def", "str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode:", "AddOrUpdateErpLockDirect(self,lock): \"\"\" AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int \"\"\" pass def AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self:", "def CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\"", "\"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self:", "Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams)", "pass def RedispatchPrintJobWithPrinter(self,args): \"\"\" RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self:", "(int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" 
GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) \"\"\" pass", "DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject): \"\"\" DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] \"\"\" pass", "\"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId:", "GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode:", "SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda", "\"\"\" GetItemExists(self: General,itemCode: str) -> bool \"\"\" pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode:", "DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder]", "pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): \"\"\" GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders):", "Inspects the format of the serialized representation of the DataSet. 
info: The System.Runtime.Serialization.SerializationInfo", "-> str \"\"\" pass UserName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: UserName(self:", "\"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\" pass def GetServerDate(self): \"\"\" GetServerDate(self: General) ->", "\"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\" pass def PrintPrintLine(self,line,label): \"\"\" PrintPrintLine(self:", "def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\"", "\"\"\" Sleep(self: General,seconds: int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self:", "Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches)", "PurchaseOrderArgs) -> (bool,PurchaseOrder) \"\"\" pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) ->", "\"\"\" pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def", "GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): \"\"\" GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\"", "pass def __getitem__(self,*args): \"\"\" x.__getitem__(y) <==> x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "\"\"\" GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" 
GetRmaCustomersExpected(self: Inbound)", "DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\"", "pass def DequeueNextMessage(self): \"\"\" DequeueNextMessage(self: Messaging) -> DequeueResult \"\"\" pass def ExecuteMessageHandler(self,args): \"\"\"", "def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): \"\"\" CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog \"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\"", "OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass", "pass def GetDeviceById(self,id,device): \"\"\" GetDeviceById(self: General,id: int) -> (bool,Device) \"\"\" pass def GetDeviceByMacAddress(self,macAddress,device):", "def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self:", "SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data): \"\"\" SetUserCacheData(self: General,tag: str,data: str) \"\"\" pass", "-> DataFlowObject[BackgroundAgent] \"\"\" pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\"", "DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def DeleteColliPreset(self,arg): \"\"\" DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset])", "def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass def UploadNewLicense(self,xml,license): \"\"\"", "delegate. follow: The delegate to combine with this delegate. 
Returns: A delegate that", "the method represented by the current delegate.-or- null,if the method represented by the", "\"\"\" pass def GetDefaultColliPreset(self,colliPreset): \"\"\" GetDefaultColliPreset(self: General) -> (bool,ColliPreset) \"\"\" pass def GetDefaultInboundLocations(self,warehouseCode,locations):", "pass def DoGetInstance(self,*args): \"\"\" DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object \"\"\" pass def", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings:", "pass def RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def", "AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files", "def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationSummary) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object):", "\"\"\" GetScriptsAll(self: General) -> (int,ZoneScripts) \"\"\" pass def GetScriptSnippets(self,snippets): \"\"\" GetScriptSnippets(self: General) ->", "being removed. 
\"\"\" pass def RaisePropertyChanging(self,*args): \"\"\" RaisePropertyChanging(self: DataSet,name: str) Sends a notification", "Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound)", "\"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self:", "int) -> int \"\"\" pass def GetNumberRangeById(self,rangeId): \"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange", "System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked", "General,countId: int) -> (bool,BarcodeStructureDefinition) \"\"\" pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): \"\"\" GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams)", "-> (bool,PrintLinesBase) \"\"\" pass def SetSessionTimeout(self): \"\"\" SetSessionTimeout(self: General) \"\"\" pass def SetUserCacheData(self,tag,data):", "General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) ->", "def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no doc", "\"\"\"hardcoded/mock instance of the class\"\"\" return Inventory() instance=ZZZ() 
\"\"\"hardcoded/returns an instance of the", "\"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) \"\"\" pass def", "the method represented by the current delegate. args: An array of objects that", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,general): \"\"\" __new__(cls: type,general: General) \"\"\"", "the current delegate.-or- null,if the method represented by the current delegate does not", "def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self: General,unsafe: bool)LoadSettings(self:", "DataFlowObject[DirectOrderLine] \"\"\" pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): \"\"\" RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\"", "List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] \"\"\" pass", "-> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self:", "def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) \"\"\" pass def CheckBatchScan(self,args): \"\"\"", "Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) ->", "bool) -> bool \"\"\" pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): \"\"\" AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool)", "-> DequeueResult 
\"\"\" pass def ExecuteMessageHandler(self,args): \"\"\" ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult \"\"\"", "-> bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\"", "GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations)", "PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo):", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance", "\"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId:", "IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def __enter__(self,*args):", "during deserialization in remoting scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information", "Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers)", "System.Object. 
\"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass", "IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class", "the class\"\"\" return Inbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate):", "\"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def", "def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) \"\"\" pass def GetUserByUserId(self,userId,user): \"\"\"", "Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs])", "instance,then a new System.Delegate without value in its invocation list; otherwise,this instance with", "self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\"", "int) \"\"\" pass def DeletePrintRules(self,ruleIds): \"\"\" DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) \"\"\" pass def GetBlobContent(self,blobId):", "\"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def RegisterQueues(self,container):", "\"\"\" GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemImageLarge(self,itemCode): \"\"\" GetItemImageLarge(self: General,itemCode:", "def GetScriptTaskById(self,id,task): \"\"\" 
GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\"", "\"\"\" pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def", "\"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] \"\"\" pass def", "-> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdateDirectOrderLineItemIdentification(self,args): \"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\"", "an instance of the class\"\"\" def AddUsedNumber(self,args): \"\"\" AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass", "\"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def CreateDatabase(self,message): \"\"\" CreateDatabase(self: General)", "\"\"\" pass def GetItemsOnLocation(self,args,items): \"\"\" GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) \"\"\" pass def", "General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\" pass def CompileScript(self,script): \"\"\"", "\"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self:", "-> (int,FindableList[IShipper]) \"\"\" pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): \"\"\" GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) \"\"\"", "def CreateStorageAssignmentClassification(self,arg): \"\"\" CreateStorageAssignmentClassification(self: 
General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\"", "OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) \"\"\" pass def OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str)", "__new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message): \"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass", "\"\"\" RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args:", "-> (int,List[str]) \"\"\" pass def GetCopyOfCache(self): \"\"\" GetCopyOfCache(self: General) -> List[ICachable] \"\"\" pass", "\"\"\" pass def FormatActivateAllExceptionMessage(self,*args): \"\"\" FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str \"\"\" pass", "List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def GetAllConfigurations(self): \"\"\"", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager:", "(bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass", "\"\"\" pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: BosRestBaseUri(self: IApplicationSettings) ->", "pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass", "return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self:", "str) -> (bool,LocationItem) \"\"\" pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): \"\"\" GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str)", "\"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def GetSnippetRoot(): \"\"\" GetSnippetRoot() ->", "\"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method", "ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId: Guid,decodeAs:", "Inventory,id: int) -> CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) ->", "Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass def GetAllItemIdentifications(self,filterBy): \"\"\" GetAllItemIdentifications(self: Inventory,filterBy: 
GetAllItemIdentificationsArgs) ->", "(int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\"", "self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings: # no", "\"\"\" GetScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetScriptsAll(self,scripts): \"\"\" GetScriptsAll(self: General)", "\"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def PickInBatch(self,dfObject): \"\"\" PickInBatch(self: Outbound,dfObject:", "\"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs)", "GetRmaOrdersAll(self,rmaOrders): \"\"\" GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) \"\"\" pass def GetRmaOrdersByFilter(self,filterBy,rmaOrders): \"\"\" GetRmaOrdersByFilter(self: Inbound,filterBy:", "-> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\"", "def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): \"\"\" PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool \"\"\" pass def", "GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) \"\"\" pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): \"\"\" GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams:", "\"\"\" ExecuteMessagePublisher(self: 
Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args:", "bool \"\"\" pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): \"\"\" UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) \"\"\"", "-> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] \"\"\"", "General,hashCode: int) -> DataFlowObject[object] \"\"\" pass def DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass", "CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) \"\"\" pass def DeleteCountFromCache(self,arg): \"\"\" DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey]", "StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\"", "pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) \"\"\" pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines):", "-> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) \"\"\"", "EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self:", "for signature \"\"\" pass @staticmethod def __new__(self,stockManager,countCacheKeyConstructor): \"\"\" __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) \"\"\"", "GetBarcodeStructure(self: General,value: str,expectedScan: 
ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetBarcodeStructureActive(self,definitions): \"\"\" GetBarcodeStructureActive(self: General)", "pass def PrintPackageSlip(self,args): \"\"\" PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool \"\"\" pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label):", "RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider:", "General,user: User,addActiveOnly: bool) -> (int,Zones) \"\"\" pass def GetZoneUsers(self,zoneId,zoneUsers): \"\"\" GetZoneUsers(self: General,zoneId: int)", "pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines):", "\"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\"", "bool \"\"\" pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception) -> RemotingException \"\"\" pass", "DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def PrepareInboundReceiveLines(self,args,cacheKey): \"\"\" PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey", "PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: 
str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey \"\"\" pass def", "pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self:", "Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject): \"\"\" PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) ->", "General) -> SystemSettingsTable \"\"\" pass def GetExecutionContexts(self): \"\"\" GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] \"\"\"", "\"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\" MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] \"\"\" pass def", "the invocation list for this instance,then a new System.Delegate without value in its", "GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) \"\"\" pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): \"\"\" GetResourcesOfTranslation(self:", "otherwise. 
\"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event.", "RemoveUserFromZone(self,zone,user): \"\"\" RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool \"\"\" pass def ResetBarcodeSettingsToDefault(self): \"\"\"", "GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs)", "def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int \"\"\" pass def GetCountGroups(self,filter,countGroups): \"\"\"", "\"\"\" SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) \"\"\" pass def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args:", "\"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) \"\"\" pass", "Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) ->", "\"\"\" pass @staticmethod def __new__(self,object,method): \"\"\" __new__(cls: type,object: object,method: IntPtr) \"\"\" pass def", "\"\"\" pass def UpdateBatchWithSettings(self,batchId,args): \"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject):", "ExecuteCommand(self: General,command: str) -> str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str)", "DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): 
\"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass def UpdateDatabase(self,message): \"\"\"", "AddWarehouseTransferItemIdentitifications(self,key,args): \"\"\" AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool \"\"\" pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): \"\"\"", "pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass @staticmethod def", "MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "pass def ValidateOrder(self,orderNumber,orderType): \"\"\" ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def", "CreateContainer(self: AppHost) -> UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem)", "GetServerDate(self): \"\"\" GetServerDate(self: General) -> DateTime \"\"\" pass def GetSessions(self,sessions): \"\"\" GetSessions(self: General)", "MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\"", "GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder)", "def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\" CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items):", "\"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" 
Printing(general: General) \"\"\" def", "\"\"\" pass def GetUsersInZone(self,zoneId,users): \"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass def", "@staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\"", "\"\"\" UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs])", "IApplicationSettings() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args): \"\"\" x.__init__(...) initializes", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns an instance", "str) -> str \"\"\" pass def GetAllocationProfiles(self,profiles): \"\"\" GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) \"\"\"", "str) -> (bool,Item) \"\"\" pass def GetItemExists(self,itemCode): \"\"\" GetItemExists(self: General,itemCode: str) -> bool", "(int,Batches) \"\"\" pass def GetBatchesByFilter(self,args,batches): \"\"\" GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) \"\"\" pass", "Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod", "GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId:", "\"\"\" GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args:", "IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: 
None) class", "pass def CreateCountGroup(self,arg): \"\"\" CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch):", "to this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self:", "\"\"\" SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: OfflineScanning) -> bool", "pass def ExecuteCommand(self,command): \"\"\" ExecuteCommand(self: General,command: str) -> str \"\"\" pass def ExecuteScript(self,script):", "\"\"\" UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self:", "A System.ComponentModel.PropertyChangedEventArgs that contains the event data. \"\"\" pass def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self:", "-> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): \"\"\" PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int)", "GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self:", "int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines)", "def AddTaskStockStreamTask(self): \"\"\" AddTaskStockStreamTask(self: General) \"\"\" pass def AddUserToZone(self,zone,user): \"\"\" AddUserToZone(self: General,zone: Zone,user:", "(bool,LocationClassification) \"\"\" pass def 
GetLocationClassifications(self,filterBy,locationClassifications): \"\"\" GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) \"\"\" pass", "bool) -> bool \"\"\" pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool)", "\"\"\" GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] \"\"\" pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): \"\"\" GetOutboundOrdersBatchable(self: Outbound,args:", "str,itemCode: str) -> (int,ItemLocations) \"\"\" pass @staticmethod def GetPrintAllocationSettings(): \"\"\" GetPrintAllocationSettings() -> AllocationSettings", "'Warn', ] class RemotePublishing(MarshalByRefObject): \"\"\" RemotePublishing(appSettings: IApplicationSettings,general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance", "SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool \"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self:", "IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool \"\"\" pass @staticmethod def WrapException(ex): \"\"\" WrapException(ex: Exception)", "Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def ProcessSalesOrder(self,args,order): \"\"\" ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines])", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self:", "\"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def DeleteNumberRange(self,dfObject): 
\"\"\" DeleteNumberRange(self: NumberGeneration,dfObject:", "def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\"", "-> AllocationSettings \"\"\" pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\"", "GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZoneScriptsOrphan(self,arg,scripts): \"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts)", "def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) \"\"\" pass def GetVendors(self,args,vendors): \"\"\"", "(bool,ItemStockTotals) \"\"\" pass def GetLibContent(self,arg,contents): \"\"\" GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) \"\"\" pass", "IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self:", "List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\" pass", "GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskByName(self,name,task): \"\"\" GetScriptTaskByName(self:", "pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: OfflineScanning) -> License", "ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns an instance of", "DataSet() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DataSet() instance=ZZZ() 
\"\"\"hardcoded/returns", "GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): \"\"\" GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode:", "EditRemotePublisherArgs) -> Publisher \"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def", "pass def GetProfilingUserNodes(self,userNodes): \"\"\" GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\"", "GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self:", "-> (bool,Count) \"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\"", "class\"\"\" def AddDirectOrder(self,args): \"\"\" AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] \"\"\" pass def AddDirectOrderLine(self,args):", "DeleteConfigurations(self,notificationSummaryIds): \"\"\" DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\"", "IsProfilerRunning(self: General) -> bool \"\"\" pass def KillAppDomain(self,*__args): \"\"\" KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) ->", "\"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass def", "object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__", "\"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\" IsBosInboundListenerRunning(self: OfflineScanning) ->", "str,orderType: InboundOrderTypeEnum) -> bool \"\"\" pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): \"\"\" CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) \"\"\"", "\"\"\" pass def GetUsersActive(self,users): \"\"\" GetUsersActive(self: General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users):", "pass def ConvertToUsersByZone(self,oZonesUsersProxy): \"\"\" ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg):", "-> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool", "bool) Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases the", "(bool,Tag) \"\"\" pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\" pass def", "def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self:", "DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset]", "signature \"\"\" pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): \"\"\" __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\"", "a DataSet serialized in its binary format,false otherwise. 
\"\"\" pass def OnPropertyChanging(self,*args): \"\"\"", "def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): \"\"\" GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) \"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\"", "\"\"\" pass def GetLocationsByCountGroup(self,countGroup,locations): \"\"\" GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) \"\"\" pass def", "pass def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General)", "str) -> bool \"\"\" pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): \"\"\" ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine])", "instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) ->", "DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) \"\"\" pass def DeleteCountFromTable(self,arg): \"\"\" DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count]", "GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass def GetCountGroupIdByType(self,type): \"\"\" GetCountGroupIdByType(self:", "ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition])", "\"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def DeleteErpLock(self,lock): \"\"\" DeleteErpLock(self: General,lock:", "object 
\"\"\" pass def MarkAsRead(self,notificationId,userId): \"\"\" MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) \"\"\" pass def", "str) -> (bool,ItemInfo) \"\"\" pass def GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation)", "\"\"\" BeepContinuous(self: General,endPoint: str) \"\"\" pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) ->", "# no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return IApplicationSettings() instance=ZZZ()", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str \"\"\" MailgunDefaultSender=property(lambda", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): \"\"\" AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId:", "(int,RmaOrderLines) \"\"\" pass def GetRmaOrderPrintLines(self,key,lines): \"\"\" GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) \"\"\" pass", "\"\"\" GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) \"\"\" pass def GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject:", "-> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PickMultipleScannedItemIdsInBatch(self,dfObject): \"\"\" PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\"", "ICentralAuthoritySystem: # no doc def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ICentralAuthoritySystem()", "-> str \"\"\" pass def ExecuteScript(self,script): \"\"\" ExecuteScript(self: General,script: str) -> object \"\"\"", "-> (int,ReplenishmentOrders) \"\"\" pass def GetStockManagerList(self,filterBy,pagingParams,stockList): 
\"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList)", "instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def", "self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda", "def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def CreatePreReceipt(self,dfObject): \"\"\" CreatePreReceipt(self: Inbound,dfObject:", "DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] \"\"\" pass def ValidateColliReferenceScan(self,barcode,result): \"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult)", "\"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\" GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject:", "ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc'", "List[str] \"\"\" pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def", "def SaveConfiguration(self,model): \"\"\" SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) \"\"\" pass def 
__init__(self,*args): \"\"\" x.__init__(...) initializes", "def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\" GetWarehouseTransfer(self:", "GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] \"\"\" pass def GetOperators(self): \"\"\" GetOperators(self: DocumentQueue) ->", "DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] \"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager:", "str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder)", "def GetBatchesIncompleteSmall(self,batches): \"\"\" GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) \"\"\" pass def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self:", "\"\"\" pass def RegisterQueues(self,container): \"\"\" RegisterQueues(self: AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): #", "-> (int,LibContents) \"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass", "x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass class OfflineScanning(MarshalByRefObject):", "-> CacheKey \"\"\" pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): \"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) ->", "object's identity,which will cause the object to be assigned a new identity when", "List[PrintRuleLine] \"\"\" pass def GetUsedAttributeNames(self,args): \"\"\" GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass", "\"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg): \"\"\" CreateScript(self: General,arg:", "CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): \"\"\" CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType:", "\"\"\" pass def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity): \"\"\" IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) \"\"\" pass", "-> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RmaOrders_GetHistoryLines(self: DataSet)", "None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem: # no doc def", "\"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey):", "def CloseBatchesForPacking(self,args): \"\"\" CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) \"\"\" pass def CloseBatchForPickingById(self,id): \"\"\" CloseBatchForPickingById(self: 
Outbound,id:", "\"\"\" MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunBaseUrl(self: IApplicationSettings) -> str", "pass def GetSalesOrder(self,args,salesOrder): \"\"\" GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) \"\"\" pass def GetSalesOrderCostLines(self,args,salesOrderCostLines):", "pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode):", "\"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\"", "def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def GenerateReplenishmentOrder(self,warehouseToCode): \"\"\"", "OfflineScanning,name: str,file: Stream,overwrite: bool) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see", "bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable)", "GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass def GetProgressUpdate(self,args,progress): \"\"\" GetProgressUpdate(self: General,args: GetActivityProgressArgs)", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self:", "str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) \"\"\"", "(bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General) -> (int,PrintLabels) \"\"\" pass def", "def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\"", "\"\"\" pass def GetNotifications(self,filterOn): \"\"\" GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] \"\"\" pass def", "\"\"\" GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self:", "str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) \"\"\" pass", "pass def CreatePrintLabel(self,arg): \"\"\" CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def CreateScript(self,arg):", "class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def 
Debug(msg):", "PrintShipmentDocumentArgs) -> bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool", "to release both managed and unmanaged resources; false to release only unmanaged resources.", "General,name: str) -> (bool,PrintLabel) \"\"\" pass def GetPrintLabelImage(self,labelId): \"\"\" GetPrintLabelImage(self: General,labelId: str) ->", "UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool \"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg:", "(int,IList[MessageHandlerDescriptorSerializable]) \"\"\" pass def GetMessagePublishers(self,args,messagePublishers): \"\"\" GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) \"\"\" pass", "DataFlowObject[ReplenishmentOrderLines] \"\"\" pass def DeleteReplenishmentOrders(self,dfObject): \"\"\" DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass", "The System.Data.DataRelation being removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs", "Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams:", "GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): \"\"\" GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\"", "CleanupCacheHistory(self): \"\"\" CleanupCacheHistory(self: General) \"\"\" pass def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass", "pass def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) \"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\"", "List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) ->", "Guid,label: PrintLabel) -> bool \"\"\" pass def ProcessBatchPacking(self,dfObject): \"\"\" ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) ->", "PagedList[QueuedPrintJob] \"\"\" pass def GetPrintJobTypes(self): \"\"\" GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def", "Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) \"\"\" pass @staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\"", "pass def CreateModule(self,arg): \"\"\" CreateModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def CreateOrUpdateBackgroundAgent(self,arg):", "CreateMessage(self,message): \"\"\" CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid)", "DocumentQueue,args: 
RedispatchPrintJobArgs) \"\"\" pass def SavePrintRule(self,rule): \"\"\" SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule \"\"\"", "OpenBatchForPickingById(self,id,cacheKey,batch): \"\"\" OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self:", "None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self:", "\"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes", "def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass def", "(OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesFromBatches(self): \"\"\" GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] \"\"\" pass def", "InitializeLifetimeService(self: RemotePublishing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "pass def BosInboundListenerPullDirect(self): \"\"\" BosInboundListenerPullDirect(self: OfflineScanning) -> int \"\"\" pass def DeleteScanner(self,args): \"\"\"", "(int,ProfilingUserNodes) \"\"\" pass def GetProgressOfActivity(self,args,activity): \"\"\" GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) \"\"\" pass", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,container): \"\"\" __new__(cls: type,container: IUnityContainer)", "def OnRemoveRelation(self,*args): \"\"\" OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is removed", "def 
RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) ->", "pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x;", "AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\" AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs)", "\"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def", "\"\"\" SaveModule(self: General,module: PythonModule) -> bool \"\"\" pass def SavePrintLabelMappings(self,labelId,mappings): \"\"\" SavePrintLabelMappings(self: General,labelId:", "\"\"\" pass def OnPropertyChanging(self,*args): \"\"\" OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. 
pcevent:", "GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) -> (int,Customers) \"\"\" pass def GetRmaCustomersExpectedByFilter(self,args,customers): \"\"\" GetRmaCustomersExpectedByFilter(self: Inbound,args:", "Guid,args: BatchUpdateArgs) \"\"\" pass def UpdateColloReference(self,dfObject): \"\"\" UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\"", "\"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) -> str", "def PrintPrintLines(self,key,label): \"\"\" PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool \"\"\" pass def PrintPrintLinesByObject(self,lines,label):", "bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args: GenerateReplenishmentOrdersArgs) -> bool \"\"\" pass", "def GetBatchesIncompleteByFilter(self,args,batches): \"\"\" GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteSmall(self,batches): \"\"\"", "\"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def", "\"\"\" GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) \"\"\" pass def GetHistoryOutboundOrders(self,args,outboundOrders): \"\"\" GetHistoryOutboundOrders(self: Outbound,args:", "PurgeProfilingLog(self: General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def", "pass def DeletePrintLabel(self,arg): \"\"\" DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] \"\"\" pass def DeleteScript(self,arg):", "General,script: 
str,scope: Dictionary[str,object]) -> object \"\"\" pass def FinishUploadModule(self,arg): \"\"\" FinishUploadModule(self: General,arg: ModuleArgs)", "SystemSettings \"\"\" pass def GetErpSettingsTable(self): \"\"\" GetErpSettingsTable(self: General) -> SystemSettingsTable \"\"\" pass def", "def GetLocationsByLocationClassification(self,locationClassification,locations): \"\"\" GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\"", "object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "Messaging) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return OfflineScanning() instance=ZZZ() \"\"\"hardcoded/returns", "General) \"\"\" pass def RegisterBackgroundAgentLastSeen(self,agent): \"\"\" RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) \"\"\" pass def RemoveUserFromZone(self,zone,user):", "-> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines)", "def GetVendors(self,args,vendors): \"\"\" GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) \"\"\" pass def GetVendorsExpected(self,vendors): \"\"\"", "DataSet) -> XmlSchema \"\"\" pass def GetSerializationData(self,*args): \"\"\" GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes", "an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: 
AsyncCallback,object:", "Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid)", "the current System.Object. \"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs]", "instance of the class\"\"\" return DataSet() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "\"\"\" __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) \"\"\" pass def __reduce_ex__(self,*args): pass def", "-> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\" GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) \"\"\"", "General,script: str,cacheKey: int) -> object \"\"\" pass def ExecuteScriptWithScope(self,script,scope): \"\"\" ExecuteScriptWithScope(self: General,script: str,scope:", "pass def GetTranslationsAvailable(self,translations): \"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\"", "indicating whether schema information has been omitted from the payload. DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader)", "Dynamically invokes (late-bound) the method represented by the current delegate. 
args: An array", "(int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod", "-> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) \"\"\"", "AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) \"\"\" pass def AuthenticateUserForDefaultZone(self,remId): \"\"\" AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) \"\"\"", "return OnGetDestinationLocationForLine() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): \"\"\" BeginInvoke(self:", "Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self):", "-> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines)", "the class\"\"\" return CallerContext() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def __init__(self,*args):", "DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) \"\"\" pass def ExecuteSummaries(self): \"\"\" ExecuteSummaries(self: NotificationSummary) \"\"\" pass def", "int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch)", "def GetTagsAll(self,tags): \"\"\" GetTagsAll(self: General) -> (int,Tags) 
\"\"\" pass def GetTagsByDescription(self,filter,tags): \"\"\" GetTagsByDescription(self:", "DataFlowObject[WarehouseLayoutSetting] \"\"\" pass def CreateZone(self,arg): \"\"\" CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] \"\"\" pass", "\"\"\" pass def UpdateDatabase(self,message): \"\"\" UpdateDatabase(self: General) -> (bool,str) \"\"\" pass def UpdatePrintLine(self,key,line):", "\"\"\" RemoveBatch(batch: Batch) \"\"\" pass def RemoveDirectOrder(self,args): \"\"\" RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) \"\"\" pass", "<==> x[y] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "GetHistoryShipmentsAll(self,pagingParams,shipments): \"\"\" GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self:", "\"\"\" GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] \"\"\" pass def GetPrinters(self): \"\"\" GetPrinters(self: DocumentQueue)", "AddTaskAutoDisposeTask(self): \"\"\" AddTaskAutoDisposeTask(self: General) \"\"\" pass def AddTaskCacheBackgroundTasks(self): \"\"\" AddTaskCacheBackgroundTasks(self: General) \"\"\" pass", "removed. 
\"\"\" pass def OnRemoveTable(self,*args): \"\"\" OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable", "def OnPythonEngineBooted(self): \"\"\" OnPythonEngineBooted(self: General) \"\"\" pass def OutputCacheStatusToLog(self): \"\"\" OutputCacheStatusToLog(self: General) \"\"\"", "UnityContainer \"\"\" pass def Init(self,appSettings,authoritySystem): \"\"\" Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) \"\"\" pass def", "Inbound) -> (int,RmaReasons) \"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) ->", "\"\"\" DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] \"\"\" pass def DeleteScriptTask(self,arg): \"\"\" DeleteScriptTask(self: General,arg:", "def SaveErpSetting(self,memberName,value): \"\"\" SaveErpSetting(self: General,memberName: str,value: object) \"\"\" pass def SaveModule(self,module): \"\"\" SaveModule(self:", "\"\"\" pass def DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def", "of this System.MulticastDelegate that is equal to the specified delegate. value: The delegate", "GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) \"\"\" pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod:", "name of the property that is about to change. 
\"\"\" pass def ReadXmlSerializable(self,*args):", "pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: CurrentLicense(self: RemotePublishing) -> License", "MarkGroupAsRead(self,groupKey,userId): \"\"\" MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity:", "to combine with this delegate. Returns: A delegate that is the new root", "str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo:", "\"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\" GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def", "General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a", "\"\"\" GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) \"\"\" pass def GetZonesActiveOfUser(self,user,zones): \"\"\" GetZonesActiveOfUser(self: General,user: User)", "\"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) \"\"\" pass def", "UpdateBarcodeSettings(self,dfObject): \"\"\" UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self:", "pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\"", "pass def 
PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\" pass", "GetAllocationSettingsByProfile(self,id): \"\"\" GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings \"\"\" pass def GetBatchByCacheKey(self,cacheKey,batch): \"\"\" GetBatchByCacheKey(self:", "CachedSettings(self: General) -> SystemSettings \"\"\" CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "\"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool \"\"\" pass", "def ReissueMessages(self,messageIds): \"\"\" ReissueMessages(self: Messaging,messageIds: List[Guid]) \"\"\" pass def SaveMessageBody(self,messageId,decodeAs,messageBody): \"\"\" SaveMessageBody(self: Messaging,messageId:", "pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\"", "SyncStock(self: Inventory) \"\"\" pass def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\"", "\"\"\" GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id:", "\"\"\" PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\"", "None) \"\"\"Get: UserName(self: CallerContext) -> str \"\"\" class Constants(object): # no doc def", "\"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass def", "instance of the class\"\"\" def 
AddOrUpdateErpLock(self,lock): \"\"\" AddOrUpdateErpLock(self: General,lock: ErpLock) -> int \"\"\"", "pass def UploadNewLicense(self,xml,license): \"\"\" UploadNewLicense(self: General,xml: str) -> (bool,License) \"\"\" pass def ValidateColliReferences(self,dfObject):", "\"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\" pass def", "\"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey:", "def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\"", "(int,RmaOrders) \"\"\" pass def GetRmaReasons(self,reasons): \"\"\" GetRmaReasons(self: Inbound) -> (int,RmaReasons) \"\"\" pass def", "PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool \"\"\" pass def PrintReceiveLabels(self,line,quantity,label): \"\"\" PrintReceiveLabels(self:", "(int,ZoneUsers) \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: General) -> object \"\"\" pass def", "-> str \"\"\" GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: GCloudPubSubPrefix(self: IApplicationSettings)", "GetItemLocationDefault(self,args,location): \"\"\" GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self:", "ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass def __getitem__(self,*args): \"\"\" x.__getitem__(y)", "\"\"\" pass def 
GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args: GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] \"\"\" pass def", "def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines): \"\"\" GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines): \"\"\"", "OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) \"\"\" pass", "\"\"\" pass def GetMessage(self,messageId): \"\"\" GetMessage(self: Messaging,messageId: Guid) -> IMessage \"\"\" pass def", "\"\"\" pass def GetPrintRuleConditions(self,printRuleId): \"\"\" GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] \"\"\" pass def", "GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location \"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\"", "-> DataFlowObject[NumberRange] \"\"\" pass def UpdateNumberRange(self,dfObject): \"\"\" UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\"", "release both managed and unmanaged resources; false to release only unmanaged resources. \"\"\"", "def GetPurchaseOrderLines(self,args,purchaseOrderLines): \"\"\" GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) \"\"\" pass def GetPurchaseOrderPrintLines(self,key,lines): \"\"\"", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\"", "of false is usually appropriate. 
true to copy the current System.MarshalByRefObject object's identity", "removed from a System.Data.DataTable. relation: The System.Data.DataRelation being removed. \"\"\" pass def OnRemoveTable(self,*args):", "\"\"\" GetTranslationsAvailable(self: General) -> (int,Translations) \"\"\" pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): \"\"\" GetTranslationsAvailablePerSet(self: General,resourseSet: str)", "IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return General() instance=ZZZ() \"\"\"hardcoded/returns", "LocationClassification) -> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations)", "pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) \"\"\" pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items):", "GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) \"\"\" pass def GetScriptTaskById(self,id,task): \"\"\" GetScriptTaskById(self: General,id: int) ->", "CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) ->", "pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): \"\"\" CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool", "GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase)", 
"CreateDatabase(self: General) -> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) ->", "\"\"\" pass def RestartScriptEngine(self): \"\"\" RestartScriptEngine(self: General) \"\"\" pass def SaveCache(self): \"\"\" SaveCache(self:", "'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): \"\"\" DataSet() \"\"\" def ZZZ(self):", "GetPreReceiptSummaries(self,purchaseOrdernumber): \"\"\" GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self:", "General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\" pass def CreateLocationClassification(self,arg): \"\"\" CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) ->", "-> (int,Customers) \"\"\" pass def GetHistoryOutboundOrderItems(self,args,items): \"\"\" GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) \"\"\"", "for signature \"\"\" pass @staticmethod def __new__(self,stockManager,messaging,general): \"\"\" __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral)", "-> (OutboundOrderLines,OutboundOrderLines) \"\"\" pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): \"\"\" GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) \"\"\"", "str) -> (bool,BatchBase) \"\"\" pass def GetBatchesAll(self,batches): \"\"\" GetBatchesAll(self: Outbound) -> (int,Batches) \"\"\"", "Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) \"\"\" pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): \"\"\" GetHistoryShipmentsByFilter(self: Outbound,filter: 
HistoryShipmentFilter,pagingParams: PagingParams)", "System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary format,false otherwise. \"\"\" pass def", "General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] \"\"\" pass def UpdateCultureOfUserSession(self): \"\"\" UpdateCultureOfUserSession(self: General) \"\"\" pass", "None,lambda self: None) \"\"\"Get: RemotingPortNr(self: IApplicationSettings) -> int \"\"\" RemotingTcpChannelName=property(lambda self: object(),lambda self,v:", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def", "PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def", "A shallow copy of the current System.Object. 
\"\"\" pass def PrintPickBatchLabel(self,dfObject): \"\"\" PrintPickBatchLabel(self:", "Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool \"\"\" pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): \"\"\"", "General,oZonesUsersProxy: ViewUsersInZone) -> Users \"\"\" pass def CreateBarcodeStructureDefinition(self,arg): \"\"\" CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) ->", "int,shipmentPackageId: int) -> (int,ItemIdentifications) \"\"\" pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): \"\"\" GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk:", "def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\"", "PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] \"\"\" pass def GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\"", "DataFlowObject[CreatePreReceiptArgs] \"\"\" pass def DeletePreReceipLines(self,dfObject): \"\"\" DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] \"\"\" pass", "-> (bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\"", "AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) \"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange]", "def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): \"\"\" AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool \"\"\" 
pass def", "bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) \"\"\" pass", "self: None) \"\"\"Get: StockManager(self: Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General)", "(int,Customers) \"\"\" pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass", "pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\" pass def GetScanners(self): \"\"\"", "def GetAppDomainList(self): \"\"\" GetAppDomainList(self: General) -> List[AppDomainInformation] \"\"\" pass def GetBackgroundAgentById(self,id,agent): \"\"\" GetBackgroundAgentById(self:", "Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\" pass def ItemBelongsToLicensePlate(self,args): \"\"\" ItemBelongsToLicensePlate(self: Inventory,args:", "def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\"", "Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000", "DataFlowObject[BarcodeStructureDefinition] \"\"\" pass def CreateColliPreset(self,arg): \"\"\" CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass", "PrintLinesBase,label: PrintLabel) -> bool \"\"\" pass def 
PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): \"\"\" PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs:", "\"\"\" AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool \"\"\" pass def BatchChangeCountType(self,filterBy,type): \"\"\" BatchChangeCountType(self:", "OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] \"\"\"", "GetStockManagerList(self,filterBy,pagingParams,stockList): \"\"\" GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\"", "Inventory,key: CacheKey,itemCode: str) -> bool \"\"\" pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): \"\"\" RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode:", "\"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ]", "General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self: General) \"\"\" pass def AddTaskMessageQueueCleanupTask(self): \"\"\"", "General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\" GetWarehousesActive(self: General) -> (int,Warehouses)", "\"\"\" DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] \"\"\" pass def DeleteBarcodeStructureDefinition(self,arg): \"\"\" DeleteBarcodeStructureDefinition(self: General,arg:", "\"\"\" GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) \"\"\" pass def GetPrintLabels(self,labels): \"\"\" GetPrintLabels(self: General)", "def 
PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\"", "\"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda", "CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): \"\"\" CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message):", "-> bool \"\"\" pass def AttachClient(self,endPoint): \"\"\" AttachClient(self: General,endPoint: str) \"\"\" pass def", "DeleteTag(self,arg): \"\"\" DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self:", "\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args): \"\"\"", "def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return NotificationCenter() instance=ZZZ() \"\"\"hardcoded/returns an instance", "GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] \"\"\" pass def GetPrintJobAuditLog(self,printJobId,paging): \"\"\" GetPrintJobAuditLog(self:", "-> (int,PackCustomers) \"\"\" pass @staticmethod def GetDefaultAllocationSettings(): \"\"\" GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass", "-> (int,Devices) \"\"\" pass def GetErpLocks(self,locks): \"\"\" 
GetErpLocks(self: General) -> (int,List[ErpLock]) \"\"\" pass", "DocumentQueue) -> List[Printer] \"\"\" pass def GetPrintJobAttributes(self,printJobId): \"\"\" GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str]", "Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): \"\"\" GetItemsToPack(self: Outbound,args: GetItemsToPackArgs)", "DocumentQueue(MarshalByRefObject): \"\"\" DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "CacheKey,itemCode: str,itemId: str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode:", "def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): \"\"\" GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\"", "\"\"\" pass def UploadModule(self,arg): \"\"\" UploadModule(self: General,arg: AddModuleArgs) -> bool \"\"\" pass def", "IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return DocumentQueue()", "\"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) ->", "General,*translations: Array[SaveTranslationArgs]) \"\"\" pass def ScheduleScriptTasks(self): \"\"\" ScheduleScriptTasks(self: General) \"\"\" pass def SendBroadcastMessage(self,message):", "-> bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] 
\"\"\"", "\"\"\" pass def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines): \"\"\" GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) \"\"\" pass", "see x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args): \"\"\" __repr__(self: object) -> str", "(bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self: General,username: str) -> (bool,User) \"\"\" pass", "RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self:", "-> (int,ScriptTasks) \"\"\" pass def GetScriptTasksAll(self,tasks): \"\"\" GetScriptTasksAll(self: General) -> (int,ScriptTasks) \"\"\" pass", "signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) \"\"\" def ZZZ(self):", "ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] \"\"\" pass def ProcessBatchPicking(self,dfObject): \"\"\" ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs])", "Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): \"\"\" GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str)", "\"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate)", "def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\" RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) \"\"\" pass def UpdatePreReceiptStatus(self,dfObject): 
\"\"\" UpdatePreReceiptStatus(self:", "(bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def", "General) \"\"\" pass def LogoutUser(self): \"\"\" LogoutUser(self: General) \"\"\" pass def MemberwiseClone(self,*args): \"\"\"", "Returns: A shallow copy of the current System.Object. \"\"\" pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): \"\"\"", "def ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def", "General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass def GetItemIdentifications(self,args,selected,itemIdentifications): \"\"\" GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected:", "General) -> (int,ScriptTasks) \"\"\" pass def GetScriptTasksInActive(self,tasks): \"\"\" GetScriptTasksInActive(self: General) -> (int,ScriptTasks) \"\"\"", "-> AppVersions \"\"\" pass def GetCurrentAppVersion(self): \"\"\" GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion \"\"\" pass", "pass def PutBackFromBatch(self,dfObject): \"\"\" PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] \"\"\" pass def PutItemIdBackFromBatch(self,dfObject):", "-> (int,HistoryRmaOrderLines) \"\"\" pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): \"\"\" GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders)", "DataFlowObject[User]) -> DataFlowObject[User] \"\"\" pass def CreateWarehouseLayoutSetting(self,arg): \"\"\" CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting]", "pass def InitializeLifetimeService(self): 
\"\"\" InitializeLifetimeService(self: OfflineScanning) -> object \"\"\" pass def IsBosInboundListenerRunning(self): \"\"\"", "-> DataFlowObject[Tag] \"\"\" pass def DeleteUser(self,arg): \"\"\" DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] \"\"\"", "Inbound,purchaseOrders: PurchaseOrders) \"\"\" pass def CancelPendingRmaOrderReceipts(self,rmaOrders): \"\"\" CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) \"\"\" pass def", "-> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script): \"\"\" GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\"", "pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg: ModuleArgs) -> bool \"\"\" pass def DeletePrintLabel(self,arg):", "-> (bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) ->", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\"", "an instance of the class\"\"\" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): \"\"\" AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) ->", "(int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) \"\"\" pass", "str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo:", "that the specified System.Data.DataSet property is about to change. 
name: The name of", "DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] \"\"\" pass def PrintDocumentsOfShipment(self,args): \"\"\" PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool", "pass def GetWarehousesWithPendingCounts(self,warehouses): \"\"\" GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) \"\"\" pass def GetWarehouseTransfer(self,key): \"\"\"", "GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) \"\"\" pass def GetCacheObject(self,hashCode): \"\"\"", "this component. \"\"\" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PurchaseOrders_GetHistoryLines(self: DataSet)", "\"\"\" GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy:", "def ExecuteScriptTaskOnce(self,id): \"\"\" ExecuteScriptTaskOnce(self: General,id: int) -> object \"\"\" pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): \"\"\"", "System.Xml.XmlReader instance that is passed during deserialization of the System.Data.DataSet. 
Returns: An System.Data.SchemaSerializationMode", "pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] \"\"\" pass def DeleteReplenishmentOrderLines(self,dfObject):", "\"\"\" pass def GetCountByCountId(self,countId,count): \"\"\" GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) \"\"\" pass def", "GetPreReceiptReceiveLines(self,dfObject): \"\"\" GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] \"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self:", "\"\"\" pass def GetShipmentServices(self,shipperId,packagesKey,services): \"\"\" GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass", "object(),lambda self,v: None,lambda self: None) \"\"\"Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" class ICentralAuthoritySystem:", "str,itemId: str) -> bool \"\"\" pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): \"\"\" SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity:", "\"\"\" pass def ShouldSerializeTables(self,*args): \"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def __enter__(self,*args):", "\"\"\" pass def CreateLicensePlateFromReceipt(self,args): \"\"\" CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate \"\"\" pass def", "GetDefaultBatchSink() -> BatchAllocationSink \"\"\" pass def GetDirectOrder(self,args): \"\"\" GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder]", "System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId: Guid) \"\"\" pass def RedispatchPrintJobWithPrinter(self,args):", "def GetProcessCountsProgress(self,percentageComplete,message): \"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self:", "\"\"\" ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) ->", "bool \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: NotificationCenter) -> object \"\"\" pass def", "__new__(cls: type,stockManager: IStockManager,messaging: Messaging) \"\"\" pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None)", "BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) \"\"\" pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): \"\"\" GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum])", "instance of the class\"\"\" return AppHost() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): \"\"\" WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) \"\"\" pass def GetZoneScripts(self,arg,scripts): \"\"\" GetZoneScripts(self: General,arg: GetScriptArgs)", "DataFlowObject[CountGroup] \"\"\" pass def CreateCountsForPickDifferences(self,batch): \"\"\" CreateCountsForPickDifferences(self: Inventory,batch: Batch) \"\"\" pass def CreateLicensePlate(self,lp):", "# encoding: utf-8 # module Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> #", "DataFlowObject[StorageAssignmentClassification] \"\"\" pass def CreateTag(self,arg): \"\"\" CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] \"\"\" pass", "\"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): \"\"\" ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description:", "-> bool \"\"\" pass def CloseTransportPackages(self,packagesKey): \"\"\" CloseTransportPackages(self: Outbound,packagesKey: CacheKey) \"\"\" pass def", "pass def GetAllExecutionTypes(self): \"\"\" GetAllExecutionTypes(self: NotificationSummary) -> List[str] \"\"\" pass def GetConfigurationForm(self,executionType): \"\"\"", "\"\"\" pass def GetZonesAll(self,zones): \"\"\" GetZonesAll(self: General) -> (int,Zones) \"\"\" pass def GetZoneScriptHook(self,arg,script):", "GetUserByUserId(self,userId,user): \"\"\" GetUserByUserId(self: General,userId: int) -> (bool,User) \"\"\" pass def GetUserByUserName(self,username,user): \"\"\" GetUserByUserName(self:", "def CleanupUserCacheData(self): \"\"\" CleanupUserCacheData(self: General) \"\"\" pass def ClearResourceCache(self): \"\"\" ClearResourceCache(self: General) \"\"\"", "delegate. value: The delegate to search for in the invocation list. 
Returns: If", "Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\"", "CreateMessage(self: Messaging,message: IMessage) \"\"\" pass def DeleteMessageByGuid(self,messageId): \"\"\" DeleteMessageByGuid(self: Messaging,messageId: Guid) \"\"\" pass", "def GetCountGroupsById(self,id): \"\"\" GetCountGroupsById(self: Inventory,id: int) -> CountGroup \"\"\" pass def GetCountGroupsByType(self,type): \"\"\"", "\"\"\" pass class General(MarshalByRefObject): \"\"\" General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "\"\"\" GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] \"\"\" pass def GetItemStockAllocations(self,filterBy,allocations): \"\"\" GetItemStockAllocations(self: Inventory,filterBy:", "General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity)", "GetDirectOrdersPending(self): \"\"\" GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] \"\"\" pass def GetDocumentsOfShipment(self,shipmentPk,documents): \"\"\" GetDocumentsOfShipment(self: Outbound,shipmentPk:", "ReportsPackingSlipFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\\\Program Files (x86)\\\\TranCon\\\\BOXwisePro\\\\Server\\\\Layouts\\\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\\\Program", "CacheKey) -> (int,FindableList[MobileService]) \"\"\" pass def GetShipperById(self,shipperId,shipper): \"\"\" 
GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase)", "(bool,CacheKey,Batch) \"\"\" pass def OpenTransferPackagesForShipping(self,key,packages): \"\"\" OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) \"\"\" pass", "pass def ChangeItemBarcode(self,args): \"\"\" ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool \"\"\" pass def CheckHookVersions(self):", "\"\"\" pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): \"\"\" GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) \"\"\" pass", "ResetBarcodeSettingsToDefault(self): \"\"\" ResetBarcodeSettingsToDefault(self: General) -> bool \"\"\" pass def ResetPrintLines(self,key,printLines): \"\"\" ResetPrintLines(self: General,key:", "\"\"\"Get: RemotingTcpChannelName(self: IApplicationSettings) -> str \"\"\" RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None)", "List[str] \"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass", "-> (int,HistoryRmaOrders) \"\"\" pass def GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\"", "GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) \"\"\" pass def GetHistoryRmaOrderLines(self,args,orderLines): \"\"\" GetHistoryRmaOrderLines(self: Inbound,args:", "GetPickListsForSettings(self: Printing) -> (int,List[str]) \"\"\" pass def GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable", "\"\"\" pass def SubtractWarehouseTransferQuantities(self,key,items): \"\"\" SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool \"\"\" pass", "generator 1.145 # no doc # 
no important from System.Collections.Generic import * from", "pass def GetPurchaseOrdersAll(self,purchaseOrders): \"\"\" GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) \"\"\" pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): \"\"\"", "PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): \"\"\" PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) \"\"\" pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId):", "Combines this System.Delegate with the specified System.Delegate to form a new delegate. follow:", "(int,Tags) \"\"\" pass def GetTagsByType(self,target,tags): \"\"\" GetTagsByType(self: General,target: TagTarget) -> (int,Tags) \"\"\" pass", "DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] \"\"\" pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): \"\"\" UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine)", "\"\"\" pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): \"\"\" FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str \"\"\"", "GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): \"\"\" GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) \"\"\" pass def GetMobileShipperById(self,shipperId,shipper):", "\"\"\" DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] \"\"\" pass def DeleteModule(self,arg): \"\"\" DeleteModule(self: General,arg:", "(int,ItemLocations) \"\"\" pass @staticmethod def GetStdLibRoot(path): \"\"\" GetStdLibRoot() -> (bool,str) 
\"\"\" pass def", "value in its invocation list; otherwise,this instance with its original invocation list. \"\"\"", "Array[Guid]) \"\"\" pass def DeletePrintRule(self,printRuleId): \"\"\" DeletePrintRule(self: DocumentQueue,printRuleId: int) \"\"\" pass def DeletePrintRules(self,ruleIds):", "List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult \"\"\" pass def ProcessWarehouseTransfer(self,dfObject): \"\"\" ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs]", "\"\"\" pass @staticmethod def GetTypedDataSetSchema(xs): \"\"\" GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType \"\"\" pass def", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general,messaging): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging:", "\"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool \"\"\" pass def StartRemotePublishingInboundListener(self): \"\"\" StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) ->", "def AddDirectOrderLineItemIdentifications(self,args): \"\"\" AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): \"\"\"", "pass def ProcessShipment(self,arg): \"\"\" ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentInfo(self,shipment,packages,arg):", "\"\"\" Trace(msg: str) \"\"\" pass @staticmethod def Warn(*__args): \"\"\" Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException)", "General,labelId: str) -> Array[Byte] \"\"\" pass def GetPrintLabelMappings(self,labelId,mappings): \"\"\" GetPrintLabelMappings(self: General,labelId: int) ->", "\"\"\" GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> 
(int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user:", "def ScanItemForPacking(self,args,result): \"\"\" ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\"", "GetChacheStatus(self): \"\"\" GetChacheStatus(self: General) -> str \"\"\" pass def GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id:", "DeleteScanner(self,args): \"\"\" DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: OfflineScanning,filePath: str)", "BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\" pass def ProcessDirectOrder(self,args): \"\"\" ProcessDirectOrder(self: Outbound,args: DirectOrderCrudArgs) ->", "DeleteRemotePublisher(self,req): \"\"\" DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) \"\"\" pass def DownloadFileAsync(self,filePath): \"\"\" DownloadFileAsync(self: RemotePublishing,filePath: str)", "object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass", "\"\"\" pass def GetUserCacheData(self,tag): \"\"\" GetUserCacheData(self: General,tag: str) -> str \"\"\" pass def", "def ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\"", "unmanaged resources; false to release only unmanaged resources. 
\"\"\" pass def GetSchemaSerializable(self,*args): \"\"\"", "-> (bool,List[str],License) \"\"\" pass def CheckServerHealth(self): \"\"\" CheckServerHealth(self: General) -> ServerHealthEnum \"\"\" pass", "pass def GetWarehousesInactive(self,warehouses): \"\"\" GetWarehousesInactive(self: General) -> (int,Warehouses) \"\"\" pass def GetZoneById(self,id,zone): \"\"\"", "-> NumberRange \"\"\" pass def GetNumberRangesByFilter(self,args): \"\"\" GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] \"\"\"", "GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\" GetRmaOrderLines(self: Inbound,args:", "-> (int,WarehouseLayoutSettings) \"\"\" pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): \"\"\" GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool", "\"\"\" pass def GetPreReceipts(self,args,preReceipts): \"\"\" GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) \"\"\" pass def", "\"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): \"\"\" GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs])", "RestartGooglePubSubServices(self): \"\"\" RestartGooglePubSubServices(self: ICentralAuthoritySystem) \"\"\" pass def StartBosInboundListener(self): \"\"\" StartBosInboundListener(self: ICentralAuthoritySystem) -> bool", "def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass def StopProfiler(self): \"\"\" StopProfiler(self: General) \"\"\"", "\"\"\" CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] \"\"\" pass def CreateReplenishmentOrder(self,order): \"\"\" 
CreateReplenishmentOrder(self:", "SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Shipment_GetHistoryShipmentLines(self: DataSet)", "of the class\"\"\" return Messaging() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def", "-> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\" GetBoxColors(self: Outbound) -> Array[Color] \"\"\" pass", "for signature \"\"\" pass @staticmethod def __new__(self,appSettings,general): \"\"\" __new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\"", "\"\"\" pass def LogAndCleanupShipment(self,shipment,packages,arg): \"\"\" LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\"", "GetUsersActive(self: General) -> (int,Users) \"\"\" pass def GetUsersAll(self,users): \"\"\" GetUsersAll(self: General) -> (int,Users)", "\"\"\" OnGetDestinationLocationForLine(object: object,method: IntPtr) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return", "Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) \"\"\" pass def RemoveTransportPackages(self,packagesKey): \"\"\" RemoveTransportPackages(self: Outbound,packagesKey: CacheKey)", "the class\"\"\" return PyLogger() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" @staticmethod def", "GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) \"\"\" pass def GetStockOnMatchingFilter(self,args): \"\"\" GetStockOnMatchingFilter(self: Inventory,args:", "__new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) \"\"\" pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda 
self:", "General,arg: GetScriptArgs) -> (int,ZoneScripts) \"\"\" pass def GetZonesOfUser(self,user,addActiveOnly,zones): \"\"\" GetZonesOfUser(self: General,user: User,addActiveOnly: bool)", "Sleep(self,seconds): \"\"\" Sleep(self: General,seconds: int) -> str \"\"\" pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): \"\"\" StartDiscoveryServer(self:", "Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] \"\"\" pass def GetPurchaseOrder(self,args,purchaseOrder): \"\"\" GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) ->", "\"\"\" pass def GetShipperServiceLinksAll(self,shipperServiceLinks): \"\"\" GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) \"\"\" pass @staticmethod def", "-> DataFlowObject[ShipperServiceLink] \"\"\" pass def CreateSnippetModule(self,arg): \"\"\" CreateSnippetModule(self: General,arg: ModuleArgs) -> bool \"\"\"", "an instance of the class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str \"\"\"", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: Relations(self: DataSet) -> DataRelationCollection \"\"\" RmaOrders_GetHistoryLines=property(lambda", "def GetSessions(self,sessions): \"\"\" GetSessions(self: General) -> (int,Sessions) \"\"\" pass def GetSettings(self): \"\"\" GetSettings(self:", "bool \"\"\" pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def", "Outbound) -> IStockManager \"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock", "pass @staticmethod def Info(msg): \"\"\" Info(msg: str) \"\"\" pass @staticmethod def Trace(msg): \"\"\"", "\"\"\" pass def GetConfigurationForm(self,executionType): \"\"\" 
GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm \"\"\" pass def", "Delegate Combines this System.Delegate with the specified System.Delegate to form a new delegate.", "pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): \"\"\" ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool \"\"\"", "\"\"\" pass def AddTaskErpLockingTask(self): \"\"\" AddTaskErpLockingTask(self: General) \"\"\" pass def AddTaskLogCleanupTask(self): \"\"\" AddTaskLogCleanupTask(self:", "\"\"\" GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool \"\"\" pass def GenerateReplenishmentOrders(self,args): \"\"\" GenerateReplenishmentOrders(self: Inventory,args:", "GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo): \"\"\" GetItemInfoFromBarcode(self: General,barcode: str)", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__", "-> (int,ItemIdentifications) \"\"\" pass def GetItemIdentificationsAvailable(self,args,itemIds): \"\"\" GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) \"\"\"", "-> CacheKey \"\"\" pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): \"\"\" PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) ->", "(bool,ScriptTask) \"\"\" pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass", "Outbound,args: GetCustomersArgs) -> (int,Customers) \"\"\" pass def GetCustomersPending(self,customers): \"\"\" GetCustomersPending(self: Outbound) -> (int,Customers)", "DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] \"\"\" pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): \"\"\" PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs]", "str) -> (int,PrintLabels) \"\"\" pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): \"\"\" GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels)", "DeleteLicensePlateItemById(self,itemId): \"\"\" DeleteLicensePlateItemById(self: Inventory,itemId: int) \"\"\" pass def DeleteReplenishmentOrder(self,order): \"\"\" DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder])", "The object returned by the method represented by the delegate. 
\"\"\" pass def", "\"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) \"\"\" pass def GetScriptTasksActive(self,tasks): \"\"\" GetScriptTasksActive(self: General)", "Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) \"\"\" pass def SkipOrderForProcessingPack(self,batchId,orderNumber): \"\"\" SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str)", "\"\"\" pass def GetLicensePlateById(self,licensePlateId,licensePlate): \"\"\" GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) \"\"\" pass def", "\"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool \"\"\" pass", "\"\"\" GetProcessCountsProgress(self: Inventory) -> (int,str) \"\"\" pass def GetReplenishmentOrder(self,args,replenishmentOrder): \"\"\" GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs)", "-> (bool,str) \"\"\" pass def CreateDevice(self,arg): \"\"\" CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] \"\"\"", "List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) \"\"\" pass def DeleteBatchById(self,batchId): \"\"\" DeleteBatchById(self: Outbound,batchId:", "Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage)", "\"\"\"hardcoded/returns an instance of the class\"\"\" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\\\Program Files", "\"\"\" pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): \"\"\" Outbound(stockManager: IStockManager,messaging: Messaging) 
\"\"\" def", "the serialized representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext", "\"\"\" pass def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location): \"\"\" IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) \"\"\"", "is invoked with during deserialization in remoting scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during", "MailgunDomainBoxwise(self: IApplicationSettings) -> str \"\"\" Options=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get:", "BackgroundAgentType) -> BackgroundAgentStatus \"\"\" pass def GetBarcodeSettingsAll(self,types): \"\"\" GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) \"\"\"", "\"\"\"Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str \"\"\" GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None)", "Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) \"\"\" pass def GetLicensePlates(self,args,pagingParams,licensePlates): \"\"\" GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams:", "def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): \"\"\" GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetPurchaseOrderLines(self,args,purchaseOrderLines):", "\"\"\" pass def GetUsedAttributeValuesAsObject(self,attributeName): \"\"\" GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] \"\"\" pass def", "* # no functions # classes class AppHost(object): \"\"\" AppHost() \"\"\" def ZZZ(self):", "str) -> (bool,ScriptTask) \"\"\" pass def 
GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): \"\"\" GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime)", "\"\"\" pass def GetMessageBodyAsString(self,messageId,decodeAs): \"\"\" GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str \"\"\" pass", "__new__(cls: type,appSettings: IApplicationSettings,general: General) \"\"\" pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None)", "(bool,ItemLocation) \"\"\" pass def GetItemLocations(self,args,locations): \"\"\" GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) \"\"\" pass", "GetPrintDatasetInstance(self,datasetFullTypeName,dataset): \"\"\" GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) \"\"\" pass def GetPrintDatasets(self,datasets): \"\"\" GetPrintDatasets(self:", "GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): \"\"\" GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) \"\"\" pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): \"\"\" GetWarehouseLayoutSettingById(self:", "def AuthenticateUserForFirstZone(self,remId): \"\"\" AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) \"\"\" pass def AuthenticateUserForZone(self,selectedZone,remId): \"\"\" AuthenticateUserForZone(self:", "def GetBatchesWithPendingPackages(self,args,result): \"\"\" GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) \"\"\" pass def GetBoxColors(self,colors): \"\"\"", "current System.Object. 
\"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] \"\"\"", "GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) \"\"\" pass def GetRmaCustomersExpected(self,customers): \"\"\" GetRmaCustomersExpected(self: Inbound) ->", "str) -> Array[Byte] \"\"\" pass def GetScriptIntellisenseOptions(self,hint): \"\"\" GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str]", "found in the invocation list for this instance,then a new System.Delegate without value", "Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) \"\"\" pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): \"\"\" GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int)", "pass def GetItems(self,args,paging,items): \"\"\" GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) \"\"\" pass def", "ShouldSerializeTables(self: DataSet) -> bool \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object", "instance of the class\"\"\" return Outbound() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\"", "pass def PrintTestLabel(self,labelId,testRun): \"\"\" PrintTestLabel(self: General,labelId: int,testRun: bool) \"\"\" pass def PurgeProfilingLog(self): \"\"\"", "\"\"\" pass def DeleteNumberRange(self,dfObject): \"\"\" DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def", "GetDefaultAllocationSettings() -> AllocationSettings \"\"\" pass @staticmethod def GetDefaultBatchSink(): \"\"\" GetDefaultBatchSink() -> BatchAllocationSink \"\"\"", "\"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> 
(int,BackgroundAgents) \"\"\" pass def", "\"\"\" pass def DeleteStorageAssignmentClassification(self,arg): \"\"\" DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] \"\"\" pass def", "StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) \"\"\" pass def StopMarshalledObjectFactories(self): \"\"\" StopMarshalledObjectFactories(self: General) \"\"\" pass", "\"\"\" pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): \"\"\" PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey", "omitted from the payload. DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for", "def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): \"\"\" GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) \"\"\" pass def GetHistoryShipment(self,shipment,packages,shipperId):", "\"\"\" UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] \"\"\" pass def UpdatePackageData(self,args,newPackageData,packages): \"\"\" UpdatePackageData(self: Outbound,args:", "\"\"\" pass def GetLocationClassificationById(self,id,locationClassification): \"\"\" GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) \"\"\" pass def", "pass def CreateCount(self,arg): \"\"\" CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] \"\"\" pass def CreateCountFromCache(self,arg):", "\"\"\" pass def CopyPrintRule(self,printRuleId): \"\"\" CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule \"\"\" pass def", "pass def 
DisposeCachedObjects(self): \"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key:", "original invocation list. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "\"\"\" pass def CreateNumberRange(self,dfObject): \"\"\" CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def", "pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method represented", "GetCountGroups(self,filter,countGroups): \"\"\" GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) \"\"\" pass def GetCountGroupsAll(self,countGroups): \"\"\" GetCountGroupsAll(self:", "ICacheKeyConstructor[Count]) \"\"\" pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Mailer(object): \"\"\"", "DirectOrderLineCrudArgs) -> DataFlowObject[bool] \"\"\" pass def RemoveDirectOrderLineItemIdentification(self,args): \"\"\" RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine]", "GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) \"\"\" pass def GetVendorsWithPendingPreReceipts(self,args,vendors): \"\"\" GetVendorsWithPendingPreReceipts(self:", "method represented by the delegate. 
\"\"\" pass def EndInvoke(self,result): \"\"\" EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult)", "GetPickListsTable(self): \"\"\" GetPickListsTable(self: Printing) -> Hashtable \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Printing)", "self: None) \"\"\"Get: Options(self: IApplicationSettings) -> str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda", "GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): \"\"\" GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) \"\"\" pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): \"\"\" GetStorageAssignmentClassifications(self:", "\"\"\" pass def GetBatchesIncomplete(self,batches): \"\"\" GetBatchesIncomplete(self: Outbound) -> (int,Batches) \"\"\" pass def GetBatchesIncompleteByFilter(self,args,batches):", "\"\"\" GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) \"\"\" pass def GetItemsOfVendor(self,args,items): \"\"\" GetItemsOfVendor(self: Inbound,args:", "DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] \"\"\" pass def DeleteDevice(self,arg): \"\"\" DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device]", "pass def GetRmaOrder(self,args,rmaOrder): \"\"\" GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) \"\"\" pass def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds):", "(DataFlowObject[ReceiveArgs],InboundReceiveLine) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "GetHistoryRmaReceiptById(self,groupGuid): \"\"\" GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder \"\"\" pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): \"\"\" GetInboundReceiveLinesByKey(self:", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass def __repr__(self,*args):", "pass def GetPrintJobs(self,args,paging): \"\"\" GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] \"\"\" pass def", "def GetMobileShipperById(self,shipperId,shipper): \"\"\" GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) \"\"\" pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): \"\"\"", "\"\"\" pass def LoadCache(self): \"\"\" LoadCache(self: General) \"\"\" pass def LoadSettings(self,*__args): \"\"\" LoadSettings(self:", "pass def GetItemImageSmall(self,itemCode): \"\"\" GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] \"\"\" pass def GetItemInfoFromBarcode(self,barcode,itemInfo):", "\"\"\" pass def EnsureLicenseExists(self): \"\"\" EnsureLicenseExists(self: RemotePublishing) \"\"\" pass def GetRemotePublishers(self): \"\"\" GetRemotePublishers(self:", "'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping',", "will cause the object to be assigned a new identity when it is", "pass def AddCountQuantity(self,key,quantity,overwriteIfExists): \"\"\" AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool \"\"\" pass", "GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) 
\"\"\" pass def GetVendorsExpectedByFilter(self,vendors,args): \"\"\" GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) ->", "ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def ChangeLicensePlateStatus(self,args): \"\"\" ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs)", "Inventory,key: CacheKey) -> WarehouseTransfer \"\"\" pass def GetWarehouseTransferItems(self,key): \"\"\" GetWarehouseTransferItems(self: Inventory,key: CacheKey) ->", "General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) \"\"\" pass def GetItemStockList(self,args,itemStockLocationList): \"\"\" GetItemStockList(self: General,args: GetItemStockListArgs)", "ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str]", "\"\"\" class Printing(MarshalByRefObject): \"\"\" Printing(general: General) \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the", "module Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 #", "BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) \"\"\" pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): \"\"\" CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType:", "General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult \"\"\" pass def ValidateTransportPackageScan(self,barcode,result): \"\"\" ValidateTransportPackageScan(self: General,barcode: str)", "-> LicensePlateItem \"\"\" pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): \"\"\" 
CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) \"\"\" pass", "def TransferItems(self,arg): \"\"\" TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] \"\"\" pass def UpdateLicensePlate(self,lp): \"\"\"", "\"\"\" pass @staticmethod def GetLibRoot(): \"\"\" GetLibRoot() -> str \"\"\" pass def GetLocationClassificationById(self,id,locationClassification):", "\"\"\" GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] \"\"\" pass def GetCurrentNumber(self,rangeId): \"\"\" GetCurrentNumber(self: NumberGeneration,rangeId:", "the class\"\"\" return Printing() instance=ZZZ() \"\"\"hardcoded/returns an instance of the class\"\"\" def GetPickListsAll(self,pickLists):", "\"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns an instance of the", "-> (int,Locations) \"\"\" pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): \"\"\" GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) \"\"\"", "None) \"\"\"Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str \"\"\" RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self:", "x; see x.__class__.__doc__ for signature \"\"\" pass class Inbound(MarshalByRefObject): \"\"\" Inbound(stockManager: IStockManager,messaging: IMessaging,general:", "ProcessShipmentInfo(self,shipment,packages,arg): \"\"\" ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] \"\"\" pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey):", "str \"\"\" pass def GetMethodImpl(self,*args): \"\"\" GetMethodImpl(self: 
MulticastDelegate) -> MethodInfo Returns a static", "GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): \"\"\" GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) \"\"\" pass def", "\"\"\" pass def GetCountGroupsByType(self,type): \"\"\" GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup \"\"\" pass def", "ExceptionHelper() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return ExceptionHelper() instance=ZZZ() \"\"\"hardcoded/returns", "General) -> SystemSettings \"\"\" pass def GetSettingsTable(self): \"\"\" GetSettingsTable(self: General) -> SystemSettingsTable \"\"\"", "\"\"\" GetUsersInZone(self: General,zoneId: int) -> (int,Users) \"\"\" pass def GetVersion(self): \"\"\" GetVersion(self: General)", "bool \"\"\" pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): \"\"\" RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool \"\"\"", "AppHost,container: IUnityContainer) \"\"\" pass class BusinessLayerExtensions(object): # no doc def ZZZ(self): \"\"\"hardcoded/mock instance", "\"\"\" DisposeCachedObjects(self: General) \"\"\" pass def DisposeCachedObjectWhenUnchanged(self,key): \"\"\" DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) \"\"\" pass", "pass def GetCountriesActive(self,countries): \"\"\" GetCountriesActive(self: General) -> (int,Countries) \"\"\" pass def GetCurrentIdentity(self): \"\"\"", "General) -> (int,BackgroundAgents) \"\"\" pass def GetBackgroundAgentsByType(self,type,agents): \"\"\" GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents)", "\"\"\" pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): \"\"\" GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) \"\"\" 
pass", "def GetImplementedMethods(self): \"\"\" GetImplementedMethods(self: General) -> ImplementedFunctionalities \"\"\" pass def GetItem(self,itemCode,item): \"\"\" GetItem(self:", "\"\"\" GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] \"\"\" pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: DocumentQueue) ->", "DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] \"\"\" pass def GetUsedAttributeValues(self,attributeName): \"\"\" GetUsedAttributeValues(self: DocumentQueue,attributeName: str) ->", "\"\"\" GetColliPresetsAll(self: General) -> (int,ColliPresets) \"\"\" pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): \"\"\" GetColliPresetSpecificationCodes(self: General,searchText: str)", "pass def PrintPickList(self,args): \"\"\" PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool \"\"\" pass def PrintSSCCLabels(self,dfObject):", "bool \"\"\" pass def PrintDuplicateLabels(self,args): \"\"\" PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool \"\"\" pass", "ExecuteMessagePublisher(self,args): \"\"\" ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult \"\"\" pass def GetDistinctTypeList(self,args): \"\"\" GetDistinctTypeList(self:", "-> str \"\"\" RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: RemotingDictionarySettingName(self: IApplicationSettings)", "pass def DeleteNotificationsByReference(self,notificationFilter): \"\"\" DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) \"\"\" pass def GetAllNotificationGroups(self): \"\"\" GetAllNotificationGroups(self:", "GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool \"\"\" pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): \"\"\" 
GetItemIdentificationExistsMulti(self: General,itemCode:", "\"\"\" pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): \"\"\" PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool \"\"\"", "__new__(cls: type,general: General) \"\"\" pass class NotificationSummary(MarshalByRefObject): \"\"\" NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() \"\"\" def", "DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "\"\"\" SendBroadcastMessage(self: General,message: str) \"\"\" pass def SendBroadcastQuestion(self,question,possibleAnswers): \"\"\" SendBroadcastQuestion(self: General,question: str,possibleAnswers: int)", "Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) \"\"\" pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): \"\"\" GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) ->", "ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) \"\"\" pass def ValidateItemIdentificationForDelivery(self,dfObject): \"\"\" ValidateItemIdentificationForDelivery(self:", "\"\"\" ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) \"\"\" pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): \"\"\" ValidateItemIdentification(self: General,itemCode:", "InitializeLifetimeService(self: Printing) -> object \"\"\" pass def MemberwiseClone(self,*args): \"\"\" MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) ->", "shallow copy of the current System.Object. \"\"\" pass def PreCreatePreReceipt(self,dfObject): \"\"\" PreCreatePreReceipt(self: Inbound,dfObject:", "shallow copy of the current System.Object. 
\"\"\" pass def RedispatchPrintJob(self,jobId): \"\"\" RedispatchPrintJob(self: DocumentQueue,jobId:", "\"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\" return Mailer() instance=ZZZ() \"\"\"hardcoded/returns an", "GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds): \"\"\" GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) \"\"\" pass def GetRmaOrderLines(self,args,rmaOrderLines): \"\"\"", "GetColliPresetById(self,id,colliPreset): \"\"\" GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) \"\"\" pass def GetColliPresetsAll(self,colliPresets): \"\"\" GetColliPresetsAll(self:", "def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): \"\"\" PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey \"\"\" pass def", "pass def InitializeLifetimeService(self): \"\"\" InitializeLifetimeService(self: Outbound) -> object \"\"\" pass def InitOrderMatchesCustomerValidator(self): \"\"\"", "pass def InitializeDerivedDataSet(self,*args): \"\"\" InitializeDerivedDataSet(self: DataSet) \"\"\" pass def IsBinarySerialized(self,*args): \"\"\" IsBinarySerialized(self: DataSet,info:", "(bool,TransportPackages) \"\"\" pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): \"\"\" ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey]", "self: None) \"\"\"Get: MailgunDefaultSender(self: IApplicationSettings) -> str \"\"\" MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda", "-> bool \"\"\" pass def RemoveWarehouseTransfer(self,key): \"\"\" RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool \"\"\"", "DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value \"\"\" Shipment_GetHistoryShipmentLines=property(lambda 
self: object(),lambda self,v: None,lambda self:", "serialized representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object.", "-> (bool,WarehouseLayoutSetting) \"\"\" pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): \"\"\" GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) \"\"\"", "str,warehouseLocationTo: str) -> CacheKey \"\"\" pass def PrintLicensePlateLabels(self,args): \"\"\" PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) \"\"\"", "def GetWarehouseLocations(self,args,locations): \"\"\" GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) \"\"\" pass def GetWarehousesActive(self,warehouses): \"\"\"", "class\"\"\" class Messaging(MarshalByRefObject): \"\"\" Messaging() \"\"\" def ZZZ(self): \"\"\"hardcoded/mock instance of the class\"\"\"", "CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] \"\"\" pass def CreateZeroCount(self,arg): \"\"\" CreateZeroCount(self: Inventory,arg: DataFlowObject[Count])", "an instance of the class\"\"\" def AddNotification(self,notificationToInsert): \"\"\" AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) \"\"\" pass", "str \"\"\" PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Get: PdfPrintNetCompany(self: IApplicationSettings) ->", "of the current System.Object. \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "RemoveInboundReceiveLine(self,cacheKey,receiveLineId): \"\"\" RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool \"\"\" pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): \"\"\"" ]
[ "Enter the second string: COW # BIRD is longer than COW # ```", "COW # ``` def chk_strings(hello, goodbye): userinput1=[h e l l o] 0,1 2,3,", "Enter the first string: BIRD # Enter the second string: COW # BIRD", "Write a program that allows users to compare words by their length. Implement", "string passed into the function, otherwise, the function should return false. # DO", "the first string: BIRD # Enter the second string: COW # BIRD is", "string: COW # BIRD is longer than COW # ``` def chk_strings(hello, goodbye):", "first string: BIRD # Enter the second string: COW # BIRD is longer", "The function should return true if the first string parameter has more characters", "entered by the user and compares them by length # The function should", "that allows users to compare words by their length. Implement a function called", "parameter has more characters (i.e. longer) than the second string passed into the", "userinput1=[h e l l o] 0,1 2,3, 4 userinput2=[g o o d bye]", "def chk_strings(hello, goodbye): userinput1=[h e l l o] 0,1 2,3, 4 userinput2=[g o", "l o] 0,1 2,3, 4 userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a", "the result in the function, print the result using the return value provided", "result using the return value provided by the function. # Example Input/Output: #", "(i.e. longer) than the second string passed into the function, otherwise, the function", "4 # Write a program that allows users to compare words by their", "Implement a function called chk_strings that accepts 2 strings entered by the user", "second string passed into the function, otherwise, the function should return false. #", "accepts 2 strings entered by the user and compares them by length #", "provided by the function. 
# Example Input/Output: # ``` # Enter the first", "<filename>q4.py<gh_stars>0 # ### Problem 4 # Write a program that allows users to", "the second string passed into the function, otherwise, the function should return false.", "### Problem 4 # Write a program that allows users to compare words", "COW # BIRD is longer than COW # ``` def chk_strings(hello, goodbye): userinput1=[h", "print the result using the return value provided by the function. # Example", "0,1 2,3, 4 userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a string\") userinput2=input(\"enter", "the first string parameter has more characters (i.e. longer) than the second string", "goodbye): userinput1=[h e l l o] 0,1 2,3, 4 userinput2=[g o o d", "length. Implement a function called chk_strings that accepts 2 strings entered by the", "Input/Output: # ``` # Enter the first string: BIRD # Enter the second", "the function, otherwise, the function should return false. # DO NOT print the", "# Example Input/Output: # ``` # Enter the first string: BIRD # Enter", "using the return value provided by the function. # Example Input/Output: # ```", "their length. Implement a function called chk_strings that accepts 2 strings entered by", "compare words by their length. Implement a function called chk_strings that accepts 2", "user and compares them by length # The function should return true if", "# ``` # Enter the first string: BIRD # Enter the second string:", "the second string: COW # BIRD is longer than COW # ``` def", "DO NOT print the result in the function, print the result using the", "more characters (i.e. longer) than the second string passed into the function, otherwise,", "called chk_strings that accepts 2 strings entered by the user and compares them", "strings entered by the user and compares them by length # The function", "should return true if the first string parameter has more characters (i.e. 
longer)", "4 userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a string\") userinput2=input(\"enter a string\")", "the user and compares them by length # The function should return true", "passed into the function, otherwise, the function should return false. # DO NOT", "# Enter the first string: BIRD # Enter the second string: COW #", "by their length. Implement a function called chk_strings that accepts 2 strings entered", "function. # Example Input/Output: # ``` # Enter the first string: BIRD #", "true if the first string parameter has more characters (i.e. longer) than the", "# ### Problem 4 # Write a program that allows users to compare", "a function called chk_strings that accepts 2 strings entered by the user and", "is longer than COW # ``` def chk_strings(hello, goodbye): userinput1=[h e l l", "has more characters (i.e. longer) than the second string passed into the function,", "compares them by length # The function should return true if the first", "in the function, print the result using the return value provided by the", "return value provided by the function. # Example Input/Output: # ``` # Enter", "by the user and compares them by length # The function should return", "print the result in the function, print the result using the return value", "if the first string parameter has more characters (i.e. longer) than the second", "Example Input/Output: # ``` # Enter the first string: BIRD # Enter the", "and compares them by length # The function should return true if the", "than the second string passed into the function, otherwise, the function should return", "false. # DO NOT print the result in the function, print the result", "the function, print the result using the return value provided by the function.", "2,3, 4 userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a string\") userinput2=input(\"enter a", "characters (i.e. 
longer) than the second string passed into the function, otherwise, the", "# Enter the second string: COW # BIRD is longer than COW #", "# Write a program that allows users to compare words by their length.", "program that allows users to compare words by their length. Implement a function", "value provided by the function. # Example Input/Output: # ``` # Enter the", "words by their length. Implement a function called chk_strings that accepts 2 strings", "userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a string\") userinput2=input(\"enter a string\") return", "the result using the return value provided by the function. # Example Input/Output:", "o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a string\") userinput2=input(\"enter a string\") return (userinput1,userinput2)", "than COW # ``` def chk_strings(hello, goodbye): userinput1=[h e l l o] 0,1", "``` # Enter the first string: BIRD # Enter the second string: COW", "string parameter has more characters (i.e. longer) than the second string passed into", "BIRD is longer than COW # ``` def chk_strings(hello, goodbye): userinput1=[h e l", "return true if the first string parameter has more characters (i.e. longer) than", "chk_strings that accepts 2 strings entered by the user and compares them by", "should return false. # DO NOT print the result in the function, print", "result in the function, print the result using the return value provided by", "string: BIRD # Enter the second string: COW # BIRD is longer than", "o] 0,1 2,3, 4 userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter a string\")", "function, print the result using the return value provided by the function. #", "a program that allows users to compare words by their length. Implement a", "2 strings entered by the user and compares them by length # The", "function should return false. 
# DO NOT print the result in the function,", "# BIRD is longer than COW # ``` def chk_strings(hello, goodbye): userinput1=[h e", "function should return true if the first string parameter has more characters (i.e.", "the function should return false. # DO NOT print the result in the", "by length # The function should return true if the first string parameter", "allows users to compare words by their length. Implement a function called chk_strings", "l l o] 0,1 2,3, 4 userinput2=[g o o d bye] 0,1,2,4,5,6,7 userinput1=input(\"enter", "longer than COW # ``` def chk_strings(hello, goodbye): userinput1=[h e l l o]", "chk_strings(hello, goodbye): userinput1=[h e l l o] 0,1 2,3, 4 userinput2=[g o o", "# DO NOT print the result in the function, print the result using", "``` def chk_strings(hello, goodbye): userinput1=[h e l l o] 0,1 2,3, 4 userinput2=[g", "the function. # Example Input/Output: # ``` # Enter the first string: BIRD", "NOT print the result in the function, print the result using the return", "longer) than the second string passed into the function, otherwise, the function should", "e l l o] 0,1 2,3, 4 userinput2=[g o o d bye] 0,1,2,4,5,6,7", "that accepts 2 strings entered by the user and compares them by length", "the return value provided by the function. # Example Input/Output: # ``` #", "by the function. # Example Input/Output: # ``` # Enter the first string:", "users to compare words by their length. Implement a function called chk_strings that", "first string parameter has more characters (i.e. longer) than the second string passed", "second string: COW # BIRD is longer than COW # ``` def chk_strings(hello,", "# ``` def chk_strings(hello, goodbye): userinput1=[h e l l o] 0,1 2,3, 4", "# The function should return true if the first string parameter has more", "function, otherwise, the function should return false. # DO NOT print the result", "otherwise, the function should return false. 
# DO NOT print the result in", "Problem 4 # Write a program that allows users to compare words by", "them by length # The function should return true if the first string", "function called chk_strings that accepts 2 strings entered by the user and compares", "to compare words by their length. Implement a function called chk_strings that accepts", "into the function, otherwise, the function should return false. # DO NOT print", "length # The function should return true if the first string parameter has", "BIRD # Enter the second string: COW # BIRD is longer than COW", "return false. # DO NOT print the result in the function, print the" ]
[ "create_engine, Column, Integer, String, DATETIME from sqlalchemy.ext.declarative import declarative_base from datetime import datetime", "import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI)", "= 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型 class User(Base):", "declarative_base from datetime import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8'", "declarative_base(bind=engine) # TODO: 定义User模型 class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True,", "增加字段 age = Column(Integer, nullable=False) country = Column(String(50), nullable=False) create_time = Column(DATETIME, default=datetime.now)", "TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine)", "User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False)", "'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型 class User(Base): __tablename__", "engine = create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型 class User(Base): __tablename__ =", "class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50),", "= Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) # TODO: 增加字段 age =", "Column, Integer, String, DATETIME from 
sqlalchemy.ext.declarative import declarative_base from datetime import datetime #", "# TODO: 增加字段 age = Column(Integer, nullable=False) country = Column(String(50), nullable=False) create_time =", "TODO: 定义User模型 class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True, autoincrement=True) name", "name = Column(String(50), nullable=False) # TODO: 增加字段 age = Column(Integer, nullable=False) country =", "DATETIME from sqlalchemy.ext.declarative import declarative_base from datetime import datetime # TODO: db_uri #", "from datetime import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine", "db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine) #", "Column(String(50), nullable=False) # TODO: 增加字段 age = Column(Integer, nullable=False) country = Column(String(50), nullable=False)", "from sqlalchemy import create_engine, Column, Integer, String, DATETIME from sqlalchemy.ext.declarative import declarative_base from", "autoincrement=True) name = Column(String(50), nullable=False) # TODO: 增加字段 age = Column(Integer, nullable=False) country", "TODO: 增加字段 age = Column(Integer, nullable=False) country = Column(String(50), nullable=False) create_time = Column(DATETIME,", "String, DATETIME from sqlalchemy.ext.declarative import declarative_base from datetime import datetime # TODO: db_uri", "primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) # TODO: 增加字段 age = Column(Integer, nullable=False)", "sqlalchemy import create_engine, Column, Integer, String, DATETIME from sqlalchemy.ext.declarative import declarative_base from datetime", "dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 
'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型", "= create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型 class User(Base): __tablename__ = 'user'", "Base = declarative_base(bind=engine) # TODO: 定义User模型 class User(Base): __tablename__ = 'user' id =", "nullable=False) # TODO: 增加字段 age = Column(Integer, nullable=False) country = Column(String(50), nullable=False) create_time", "'user' id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) # TODO: 增加字段", "sqlalchemy.ext.declarative import declarative_base from datetime import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI", "Integer, String, DATETIME from sqlalchemy.ext.declarative import declarative_base from datetime import datetime # TODO:", "# TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base =", "datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base", "定义User模型 class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True, autoincrement=True) name =", "import create_engine, Column, Integer, String, DATETIME from sqlalchemy.ext.declarative import declarative_base from datetime import", "Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) # TODO: 增加字段 age = Column(Integer,", "from sqlalchemy.ext.declarative import declarative_base from datetime import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8", "create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型 class 
User(Base): __tablename__ = 'user' id", "= 'user' id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) # TODO:", "datetime import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine =", "import declarative_base from datetime import datetime # TODO: db_uri # dialect+driver://username:password@host:port/database?charset=utf8 DB_URI =", "= Column(String(50), nullable=False) # TODO: 增加字段 age = Column(Integer, nullable=False) country = Column(String(50),", "id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) # TODO: 增加字段 age", "DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO: 定义User模型 class", "# dialect+driver://username:password@host:port/database?charset=utf8 DB_URI = 'mysql+pymysql://root:root123@127.0.0.1:3300/alembic_demo?charset=utf8' engine = create_engine(DB_URI) Base = declarative_base(bind=engine) # TODO:", "# TODO: 定义User模型 class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True, autoincrement=True)", "= declarative_base(bind=engine) # TODO: 定义User模型 class User(Base): __tablename__ = 'user' id = Column(Integer,", "__tablename__ = 'user' id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(50), nullable=False) #" ]
[ "placar, 20) ''' Método mostrar, desenha a maçã na tela ''' def mostrar(self):", "= \"direita\" if event.key == pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\"", "branco, [0, altura - placar - 2, largura, 2]) if self.pos_x + tamanho", "and self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\" if event.key == pygame.K_UP and self.cobra.direcao", "__init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor)", "branco, [largura - 2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura - placar", "self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao == \"direita\": self.pos_x += tamanho", "Atualiza toda a tela com todos os elementos que foram desenhados anteriormente '''", "jogo através da instância ''' Fecha a janela principal do jogo ''' pygame.quit()", "não se está mais jogando porque perdeu e é chamado o método \"perdido\"", "[145, 170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) '''", "array é maior que o comprimento da cobra ''' def rastro(self): if len(self.cobra)", "branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela com todos os elementos '''", "self.pos_y + tamanho > altura - placar: self.pos_y = 0 if self.pos_y <", "== '__main__': instancia = Jogo() instancia.menu() # Iniciando o jogo através da instância", "self.cobra[:-1]): return True return False ''' Método reinicia, redefine todos os valores da", "tamanho, tamanho]) indice += 1 ''' Método rastro, remove a cauda quando o", "(0, 200, 0) verde_escuro = (0, 150, 0) azul = (0, 0, 255)", "47]) textoContinuar = Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o", "tela ''' def mostrar(self): indice = 0 for XY in self.cobra: if indice", "no jogo ''' def iniciar(self): pontos_fundo = 0 while self.jogando: ''' Iterador de", "é checado se a cobra ultrapassou alguma das bordas, 
caso tenha ultrapassado é", "event.type == pygame.QUIT: self.noMenu = False if event.type == pygame.KEYDOWN: if event.key ==", "= Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela com todos", "False self.perdeu = True self.perdido() if self.pos_y + tamanho > altura - placar:", "e direção, bem como o array que contém a posição de cada pedaço", "mensagem a cor e o tamanho como parâmetros ''' class Texto: def __init__(self,", "self.cobra.direcao = \"direita\" if event.key == pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao =", "para criar objetos de textop que serão exibidos nas telas do jogo, recebe", "tela onde ela começará o jogo ''' class Cobra: def __init__(self, x, y):", "durante o tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", é verificado", "self.noMenu = False self.perdeu = False self.modo = \"livre\" self.iniciar() ''' Limpa a", "seguindo e redefine a nova posição naquela direção ''' if self.cobra.direcao == \"cima\":", "textoPontuacao.mostrar(180, 100) ''' Desenha o botão de voltar ao menu de seleção '''", "if event.key == pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\" if event.key", "jogo não foi fechado, bem como se nenhuma das setas foi apertada para", "''' def rastro(self): if len(self.cobra) > self.comp: del self.cobra[0] ''' Método morreu, verifica", "for XY in self.cobra: if indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0],", "event.type == pygame.QUIT: self.jogando = False self.perdeu = False if event.type == pygame.KEYDOWN:", "prata = (192, 192, 192) laranja = (255, 69, 0) cinza = (79,", "reposicionada, a cobra aumenta e o placar de pontos aumenta ''' if self.pos_x", "tamanho - placar, 20) ''' Classe Jogo, definirá todo o restante do jogo,", "primeiro é feita a checagem do modo, caso o modo escolhido no menu", "de voltar ao menu de seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51])", "de derrota, 
faz tudo que acontece ao perder, podendo o jogador voltar a", "x, y): fundo.blit(self.texto, [x, y]) ''' Classe cobra definirá os elementos do objeto", "> self.comp: del self.cobra[0] ''' Método morreu, verifica se a cobra comeu ela", "y como parâmetro, que será o local na tela onde ela começará o", "cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25)", "Checa se o jogador ainda não perdeu o jogo ''' if self.jogando: '''", "= Maca() ''' Método iniciar, possui o loop principal do jogo, que faz", "== pygame.QUIT: self.noMenu = False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE:", "insere a nova cabeça no array das posições ''' def move(self, x, y):", "self.jogando = False self.perdeu = True self.perdido() if self.pos_y + tamanho > altura", "pygame é importada, juntamente do modulo locals dela, além disso o metodo randrange", "como cabeça, comprimento e direção, bem como o array que contém a posição", "do jogo ou quer voltar a jogar, caso queira voltar, todo o jogo", "globais que utilizaremos em todo o código, altura e largura da tela, tamanho", "''' if self.modo == \"livre\": if self.pos_x + tamanho > largura: self.pos_x =", "Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58])", "checar se o pygame foi iniciado corretamente ''' try: pygame.init() print(\"O modulo pygame", "self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos", "Checa para qual direção a cobra está seguindo e redefine a nova posição", "principal do jogo, que faz absolutamente tudo que acontece no jogo ''' def", "na mesma posição, caso estejam, a maçã é reposicionada, a cobra aumenta e", "pontos_fundo += 1 self.cobra.cresce() ''' Checa se o jogador ainda não perdeu o", "como são criados os objetos maçã e cobra, não recebe parâmetros ''' #", "y]) ''' Classe cobra definirá os 
elementos do objeto cobra, como cabeça, comprimento", "bordas, caso tenha ultrapassado é definido que não se está mais jogando porque", "comeu ela mesma, caso tenha comido o jogo é definido perdido, e o", "voltar a jogar, caso queira voltar, todo o jogo é redefinido e se", "a nova posição que é definida como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y)", "randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura - tamanho - placar,", "placar e o texto contendo a pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura", "tamanho]) ''' Método reposicionar, define novos x e y aleatórios para a maçã", "da cobra ''' def cresce(self): self.comp += 1 ''' Método mostrar, desenha cada", "if self.pos_y + tamanho > altura - placar: self.jogando = False self.perdeu =", "na tela ''' def mostrar(self): indice = 0 for XY in self.cobra: if", "altura - tamanho - placar, 20) ''' Classe Jogo, definirá todo o restante", "textoPlacar.mostrar(10, altura - 30) ''' Desenha a maçã na tela ''' self.maca.mostrar() '''", "Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final do jogador '''", "= (220, 220, 220) ''' Definição de configurações do jogo, relógio para definir", "2, altura]) pygame.draw.rect(fundo, branco, [0, altura - placar - 2, largura, 2]) if", "jogando porque perdeu e é chamado o método \"perdido\" ''' if self.modo ==", "event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao =", "os objetos maçã e cobra, não recebe parâmetros ''' # noinspection DuplicatedCode class", "para desenhar tudo do jogo e o título da janela do jogo '''", "todos os elementos ''' pygame.display.update() ''' Instancia do jogo ''' if __name__ ==", "ainda não perdeu o jogo ''' if self.jogando: ''' Descomente e descubra o", "1 pontos_fundo += 1 ''' Aqui primeiro é feita a checagem do modo,", "== pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao = 
\"direita\" if event.key == pygame.K_UP", "pygame.K_ESCAPE: self.jogando = False self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos =", "self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 if 193 <", "da cobra e maçã, tamanho do placar e cores no formato RGB''' largura", "self.maca.mostrar() ''' Atualiza toda a tela com todos os elementos que foram desenhados", "= \"classico\" self.iniciar() if 183 < mouse_x < 183 + 279 and 268", "cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50)", "se o jogo não foi fechado, bem como se nenhuma das setas foi", "self.pos_x + tamanho > largura: self.jogando = False self.perdeu = True self.perdido() if", "método iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando =", "controle para continuar jogando, perder, posição e velocidade da cobra, pontos, bem como", "self.cabeca = [x, y] self.cobra.append([x, y]) ''' Método cresce, aumenta o comprimento da", "self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE: self.pontos += 1", "def rastro(self): if len(self.cobra) > self.comp: del self.cobra[0] ''' Método morreu, verifica se", "tamanho elif self.cobra.direcao == \"direita\": self.pos_x += tamanho else: pass ''' Checa se", "foram desenhados anteriormente ''' pygame.display.update() ''' Define o fps do jogo ''' relogio.tick(15)", "1 ''' Aqui primeiro é feita a checagem do modo, caso o modo", "sim retorna verdadeiro, caso contrário, retorna falso ''' def morreu(self): if any(Bloco ==", "da tela, tamanho da cobra e maçã, tamanho do placar e cores no", "de tentativa e erro para checar se o pygame foi iniciado corretamente '''", "False self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura -", "< mouse_y < 268 + 51: self.jogando = True self.noMenu = False self.perdeu", "\"baixo\" if event.key == pygame.K_SPACE: self.pontos += 1 
pontos_fundo += 1 self.cobra.cresce() '''", "pygame.K_ESCAPE: self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x =", "''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [143, 168, 359,", "cabeça e insere a nova cabeça no array das posições ''' def move(self,", "definida como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado", "faz absolutamente tudo que acontece no jogo ''' def iniciar(self): pontos_fundo = 0", "comeu ela mesma, se sim retorna verdadeiro, caso contrário, retorna falso ''' def", "altura - 30) ''' Desenha a maçã na tela ''' self.maca.mostrar() ''' Atualiza", "maçã estão na mesma posição, caso estejam, a maçã é reposicionada, a cobra", "locals dela, além disso o metodo randrange que usaremos para gerar numeros aleatórios", "tela a cada novo inicio de loop ''' fundo.fill(self.fundo) ''' Checa para qual", "Atualiza a tela com todos os elementos ''' pygame.display.update() ''' Instancia do jogo", "durante o tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", sendo assim", "jogador ainda não perdeu o jogo ''' if self.jogando: ''' Descomente e descubra", "if 183 < mouse_x < 183 + 279 and 268 < mouse_y <", "[195, 270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) '''", "len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0],", "def morreu(self): if any(Bloco == self.cabeca for Bloco in self.cobra[:-1]): return True return", "execução estão podem ser obtidos pelo \"pygame.event.get()\", é verificado se o jogador quis", "o jogador ainda não perdeu o jogo ''' if self.jogando: ''' Descomente e", "270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza", "self.velocidade_y = 0 self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = 
Maca()", "atravessar o mapa, mas caso tenha escolhido o modo clássico é checado se", "comida pela cobra ''' def reposicionar(self): self.x = randrange(0, largura - tamanho, 20)", "y que é a posição da maçã na tela ''' class Maca: def", "jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99)", "tela com todos os elementos ''' pygame.display.update() def menu(self): while self.noMenu: ''' Iterador", "Jogo() instancia.menu() # Iniciando o jogo através da instância ''' Fecha a janela", "self.comp = 1 self.cobra = [self.cabeca] self.direcao = \"\" ''' Método move, recebe", "cinzaClaro = (220, 220, 220) ''' Definição de configurações do jogo, relógio para", "bloco de tentativa e erro para checar se o pygame foi iniciado corretamente", "randrange(0, altura - tamanho - placar, 20) ''' Método mostrar, desenha a maçã", "20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0", "192) laranja = (255, 69, 0) cinza = (79, 79, 79) cinzaClaro =", "setas foi apertada para mover a cobra ''' for event in pygame.event.get(): if", "continuar jogando, perder, posição e velocidade da cobra, pontos, bem como são criados", "self.x = x self.y = y self.cabeca = [x, y] self.comp = 1", "estão podem ser obtidos pelo \"pygame.event.get()\", é verificado se o jogador quis sair", "da cobra, pontos, bem como são criados os objetos maçã e cobra, não", "o título da janela do jogo ''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura,", "= False self.perdeu = False self.modo = \"livre\" self.iniciar() ''' Limpa a tela", "''' Método perdido, possui o loop da tela de derrota, faz tudo que", "279 and 268 < mouse_y < 268 + 58: self.jogando = True self.perdeu", "título da janela do jogo ''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura))", "self.cobra.direcao != \"baixo\": self.cobra.direcao 
= \"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao !=", "''' try: pygame.init() print(\"O modulo pygame foi inicializado com sucesso\") except: print(\"O modulo", "os elementos ''' pygame.display.update() def menu(self): while self.noMenu: ''' Iterador de eventos, todos", "''' Método move, recebe os parâmetro x e y, que serão as novas", "criados os objetos maçã e cobra, não recebe parâmetros ''' # noinspection DuplicatedCode", "tamanho - placar, 20) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0", "a mensagem a cor e o tamanho como parâmetros ''' class Texto: def", "and 168 < mouse_y < 168 + 51: self.jogando = False self.perdeu =", "''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto", "jogo ''' if __name__ == '__main__': instancia = Jogo() instancia.menu() # Iniciando o", "y] self.comp = 1 self.cobra = [self.cabeca] ''' Classe maçã que definirá o", "Classe Jogo, definirá todo o restante do jogo, como variaveis de controle para", "20) self.y = randrange(0, altura - tamanho - placar, 20) ''' Método mostrar,", "event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False self.perdeu = False", "= \"livre\" self.iniciar() ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha o titulo", "os atributos x e y que é a posição da maçã na tela", "desenhados anteriormente ''' pygame.display.update() ''' Define o fps do jogo ''' relogio.tick(15) '''", "jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275,", "a maçã é reposicionada, a cobra aumenta e o placar de pontos aumenta", "143 + 359 and 168 < mouse_y < 168 + 51: self.jogando =", "do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179,", "[143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar = Texto(\"Modo", "pelo 
\"pygame.event.get()\", é verificado se o jogador quis sair do jogo ou quer", "para checar se o pygame foi iniciado corretamente ''' try: pygame.init() print(\"O modulo", "2, largura, 2]) if self.pos_x + tamanho > largura: self.jogando = False self.perdeu", "False self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x =", "fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para criar objetos", "if event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu = False if event.type ==", "[183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) textoContinuar = Texto(\"Modo", "< 183 + 279 and 268 < mouse_y < 268 + 51: self.jogando", "jogando, perder, posição e velocidade da cobra, pontos, bem como são criados os", "que contém a posição de cada pedaço da cobra, recebe as coordenadas x", "(255, 69, 0) cinza = (79, 79, 79) cinzaClaro = (220, 220, 220)", "eventos que acontecem durante o tempo de execução estão podem ser obtidos pelo", "y self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca] self.direcao =", "__name__ == '__main__': instancia = Jogo() instancia.menu() # Iniciando o jogo através da", "self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura - tamanho", "0 for XY in self.cobra: if indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro,", "tamanho = 20 placar = 40 branco = (255, 255, 255) preto =", "''' Classe cobra definirá os elementos do objeto cobra, como cabeça, comprimento e", "mouse_x < 193 + 279 and 268 < mouse_y < 268 + 58:", "mostrar(self, x, y): fundo.blit(self.texto, [x, y]) ''' Classe cobra definirá os elementos do", "bem como são criados os objetos maçã e cobra, não recebe parâmetros '''", "self.jogando = False self.perdeu = False self.noMenu = True self.pos_x = randrange(0, largura", "0 self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' 
Método", "193 < mouse_x < 193 + 279 and 268 < mouse_y < 268", "self.perdeu = False self.noMenu = True self.pos_x = randrange(0, largura - tamanho, 20)", "self.jogando = False break if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and", "= Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o", "do jogo ''' relogio.tick(15) ''' Método perdido, possui o loop da tela de", "a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\",", "modulo locals dela, além disso o metodo randrange que usaremos para gerar numeros", "pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\" if event.key == pygame.K_UP and", "True self.perdido() if self.pos_y + tamanho > altura - placar: self.jogando = False", "- placar - 2, largura, 2]) if self.pos_x + tamanho > largura: self.jogando", "parâmetro, que será o local na tela onde ela começará o jogo '''", "self.cobra.rastro() ''' Checa se a cobra comeu ela mesma, caso tenha comido o", "self.pos_x = largura - tamanho if self.pos_y + tamanho > altura - placar:", "183 < mouse_x < 183 + 279 and 268 < mouse_y < 268", "o fps, fundo para desenhar tudo do jogo e o título da janela", "+ 58: self.jogando = True self.perdeu = False self.pos_x = randrange(0, largura -", "seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365,", "pontos, bem como são criados os objetos maçã e cobra, não recebe parâmetros", "Desenha a pontuação final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" +", "tela com todos os elementos que foram desenhados anteriormente ''' pygame.display.update() ''' Define", "tamanho do placar e cores no formato RGB''' largura = 640 altura =", "adicionais ''' self.cobra.rastro() ''' Checa se a cobra comeu ela mesma, caso tenha", "inicio de loop ''' fundo.fill(self.fundo) ''' Checa para qual direção a cobra está", "caso depois de 
ter perdido o jogados possa continuar jogando ''' def reinicia(self,", "a cobra na tela ''' self.cobra.mostrar() ''' Desenha o placar e o texto", "False self.perdeu = False self.noMenu = True self.pos_x = randrange(0, largura - tamanho,", "é chamado o método \"perdido\" ''' if self.modo == \"livre\": if self.pos_x +", "foi iniciado corretamente ''' try: pygame.init() print(\"O modulo pygame foi inicializado com sucesso\")", "de cada pedaço da cobra, recebe as coordenadas x e y como parâmetro,", "# textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake", "no formato RGB''' largura = 640 altura = 480 tamanho = 20 placar", "perdido, possui o loop da tela de derrota, faz tudo que acontece ao", "168 + 51: self.jogando = False self.perdeu = False self.noMenu = True self.pos_x", "268 + 51: self.jogando = True self.noMenu = False self.perdeu = False self.modo", "+ 51: self.jogando = True self.noMenu = False self.perdeu = False self.modo =", "atributos x e y que é a posição da maçã na tela '''", "mouse_y = mouse_pos[1] if 143 < mouse_x < 143 + 359 and 168", "self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo += 1 ''' Aqui primeiro é feita", "''' Iterador de eventos, todos os eventos que acontecem durante o tempo de", "placar e cores no formato RGB''' largura = 640 altura = 480 tamanho", "!= \"baixo\": self.cobra.direcao = \"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao != \"cima\":", "< 193 + 279 and 268 < mouse_y < 268 + 58: self.jogando", "desenha a maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho,", "fps, fundo para desenhar tudo do jogo e o título da janela do", "largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31)", "o jogador quis sair do jogo ou quer voltar a jogar, caso queira", "\"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao == 
\"direita\": self.pos_x += tamanho else: pass", "import pygame import pygame.locals from random import randrange print(\"Módulos importados com sucesso\") '''", "Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão de continuar jogando '''", "é chamado ''' if self.cobra.morreu(): self.jogando = False self.perdeu = True self.perdido() '''", "branco = (255, 255, 255) preto = (0, 0, 0) vermelho = (255,", "273) ''' Atualiza a tela com todos os elementos ''' pygame.display.update() ''' Instancia", "acontecem durante o tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", é", "self.cobra.direcao == \"direita\": self.pos_x += tamanho else: pass ''' Checa se a cobra", "self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y)", "tudo que acontece ao perder, podendo o jogador voltar a jogar ou sair", "\"cima\": self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif self.cobra.direcao", "método \"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando = False self.perdeu = True", "= Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30) ''' Desenha a", "[0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura]) pygame.draw.rect(fundo,", "True self.perdido() ''' Move a cobra para a nova posição que é definida", "pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2, 0, 2,", "False self.noMenu = False self.modo = \"classico\" self.iniciar() if 183 < mouse_x <", "das bordas, caso tenha ultrapassado é definido que não se está mais jogando", "texto contendo a pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura - placar, largura,", "''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False self.perdeu", "[145, 170, 365, 47]) textoContinuar = Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173)", "começará o jogo 
''' class Cobra: def __init__(self, x, y): self.x = x", "placar, 20) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 self.cobra =", "Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' Método iniciar, possui o loop principal do", "pygame.event.get(): if event.type == pygame.QUIT: self.noMenu = False if event.type == pygame.KEYDOWN: if", "não possuirá bordas e você poderá atravessar o mapa, mas caso tenha escolhido", "derrota, faz tudo que acontece ao perder, podendo o jogador voltar a jogar", "self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 if", "mas caso tenha escolhido o modo clássico é checado se a cobra ultrapassou", "168 < mouse_y < 168 + 51: self.jogando = False self.perdeu = False", "a tela com todos os elementos ''' pygame.display.update() def menu(self): while self.noMenu: '''", "iniciado corretamente ''' try: pygame.init() print(\"O modulo pygame foi inicializado com sucesso\") except:", "self.pontos += 1 pontos_fundo += 1 self.cobra.cresce() ''' Checa se o jogador ainda", "para o método iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT:", "mesma posição, caso estejam, a maçã é reposicionada, a cobra aumenta e o", "pygame.display.update() ''' Define o fps do jogo ''' relogio.tick(15) ''' Método perdido, possui", "sucesso\") ''' Declaração das váriaveis globais que utilizaremos em todo o código, altura", "== \"baixo\": self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif", "objetos de textop que serão exibidos nas telas do jogo, recebe a mensagem", "51: self.jogando = True self.perdeu = False self.noMenu = False self.modo = \"classico\"", "False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu", "Método mostrar desenha na tela o texto criado pelo construtor da classe '''", "pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu 
= False if event.type", "jogador voltar a jogar ou sair do jogo ''' def perdido(self): while self.perdeu:", "= mouse_pos[1] if 143 < mouse_x < 143 + 369 and 168 <", "and self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao", "self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor) ''' Método mostrar desenha", "Método cresce, aumenta o comprimento da cobra ''' def cresce(self): self.comp += 1", "self.modo = None self.fundo = preto self.pos_x = randrange(0, largura - tamanho, 20)", "def move(self, x, y): self.cabeca = [x, y] self.cobra.append([x, y]) ''' Método cresce,", "random import randrange print(\"Módulos importados com sucesso\") ''' Utilizando um bloco de tentativa", "da maçã na tela ''' class Maca: def __init__(self): self.x = randrange(0, largura", "355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o", "para os valores iniciais, para caso depois de ter perdido o jogados possa", "placar else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura,", "branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura])", "self.pos_x = 0 if self.pos_x < 0: self.pos_x = largura - tamanho if", "caso queira voltar, todo o jogo é redefinido e se retorna para o", "branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo,", "(79, 79, 79) cinzaClaro = (220, 220, 220) ''' Definição de configurações do", "verificado se o jogador quis sair do jogo ou quer voltar a jogar,", "que não se está mais jogando porque perdeu e é chamado o método", "caso tenha ultrapassado é definido que não se está mais jogando porque perdeu", "continuar jogando ''' def reinicia(self, x, y): self.x = x self.y = y", "= Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' Método iniciar, possui o loop principal", 
"Descomente e descubra o que isso faz''' # if pontos_fundo == 10: #", "if self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y +=", "mouse_x < 143 + 369 and 168 < mouse_y < 168 + 51:", "__init__(self, x, y): self.x = x self.y = y self.cabeca = [x, y]", "o jogo ''' if self.jogando: ''' Descomente e descubra o que isso faz'''", "possa continuar jogando ''' def reinicia(self, x, y): self.x = x self.y =", "str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco,", "XY in self.cobra: if indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1],", "não recebe nenhum parâmetro, possui os atributos x e y que é a", "cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30)", "self.perdido() if self.pos_x < 0: self.jogando = False self.perdeu = True self.perdido() if", "direção, bem como o array que contém a posição de cada pedaço da", "textop que serão exibidos nas telas do jogo, recebe a mensagem a cor", "y]) ''' Método cresce, aumenta o comprimento da cobra ''' def cresce(self): self.comp", "todos os elementos ''' pygame.display.update() def menu(self): while self.noMenu: ''' Iterador de eventos,", "''' Método mostrar, desenha a maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho,", "perdeu e é chamado o método \"perdido\" ''' if self.modo == \"livre\": if", "if self.modo == \"livre\": if self.pos_x + tamanho > largura: self.pos_x = 0", "= (0, 200, 0) verde_escuro = (0, 150, 0) azul = (0, 0,", "''' def iniciar(self): pontos_fundo = 0 while self.jogando: ''' Iterador de eventos, todos", "== pygame.K_SPACE: self.pontos += 1 pontos_fundo += 1 self.cobra.cresce() ''' Checa se o", "o botão de voltar ao menu de seleção ''' pygame.draw.rect(fundo, prata, [143, 168,", "< 143 + 359 and 168 < mouse_y < 168 + 51: self.jogando", 
"pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo,", "cobra e maçã, tamanho do placar e cores no formato RGB''' largura =", "20) self.y = randrange(0, altura - tamanho - placar, 20) ''' Classe Jogo,", "perdido, e o método \"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando = False", "< mouse_x < 143 + 369 and 168 < mouse_y < 168 +", "x e y como parâmetro, que será o local na tela onde ela", "noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando = False self.perdeu = False self.noMenu", "largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0,", "caso tenha escolhido o modo clássico é checado se a cobra ultrapassou alguma", "= [self.cabeca] ''' Classe maçã que definirá o objeto maçã, não recebe nenhum", "parâmetros ''' # noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando = False self.perdeu", "[x, y] self.comp = 1 self.cobra = [self.cabeca] ''' Classe maçã que definirá", "''' pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47])", "a cada novo inicio de loop ''' fundo.fill(self.fundo) ''' Checa para qual direção", "0 if 193 < mouse_x < 193 + 279 and 268 < mouse_y", "perdido o jogados possa continuar jogando ''' def reinicia(self, x, y): self.x =", "any(Bloco == self.cabeca for Bloco in self.cobra[:-1]): return True return False ''' Método", "tamanho, 20) self.y = randrange(0, altura - tamanho - placar, 20) ''' Método", "pelo \"pygame.event.get()\", sendo assim verificado se o jogo não foi fechado, bem como", "está mais jogando porque perdeu e é chamado o método \"perdido\" ''' if", "- placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura", "= Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar", "else: pass 
''' Checa se a cobra e a maçã estão na mesma", "textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160,", "self.perdeu = False self.noMenu = False self.modo = \"classico\" self.iniciar() if 183 <", "como se nenhuma das setas foi apertada para mover a cobra ''' for", "verde, [XY[0], XY[1], tamanho, tamanho]) indice += 1 ''' Método rastro, remove a", "recebe a mensagem a cor e o tamanho como parâmetros ''' class Texto:", "__init__(self): self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0, altura -", "1 ''' Método mostrar, desenha cada pedaço da cobra na tela ''' def", "143 < mouse_x < 143 + 359 and 168 < mouse_y < 168", "tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao == \"direita\": self.pos_x", "self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 '''", "self.fundo = preto self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0,", "self.jogando = False self.perdeu = True self.perdido() ''' Move a cobra para a", "não foi fechado, bem como se nenhuma das setas foi apertada para mover", "+= tamanho else: pass ''' Checa se a cobra e a maçã estão", "não perdeu o jogo ''' if self.jogando: ''' Descomente e descubra o que", "''' def move(self, x, y): self.cabeca = [x, y] self.cobra.append([x, y]) ''' Método", "''' Desenha o titulo \"Snake Game\" na tela ''' # textoPerdeuSombra = Texto(\"Snake", "podem ser obtidos pelo \"pygame.event.get()\", é verificado se o jogador quis sair do", "não foi inicializado com sucesso\") ''' Declaração das váriaveis globais que utilizaremos em", "self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo += 1 ''' Aqui primeiro é", "vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final do jogador ''' textoPontuacaoSombra", "Declaração das 
váriaveis globais que utilizaremos em todo o código, altura e largura", "é verificado se o jogador quis sair do jogo ou quer voltar a", "a nova cabeça no array das posições ''' def move(self, x, y): self.cabeca", "\" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão de voltar", "pontos_fundo == 10: # pontos_fundo = 0 # if self.fundo == branco: #", "altura e largura da tela, tamanho da cobra e maçã, tamanho do placar", "exibidos nas telas do jogo, recebe a mensagem a cor e o tamanho", "o método iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu", "no menu tenha sido o modo livre, o jogo não possuirá bordas e", "0: self.pos_y = altura - tamanho - placar else: pygame.draw.rect(fundo, branco, [0, 0,", "__init__(self): self.jogando = False self.perdeu = False self.noMenu = True self.modo = None", "def mostrar(self): indice = 0 for XY in self.cobra: if indice == len(self.cobra)", "220) ''' Definição de configurações do jogo, relógio para definir o fps, fundo", "maçã na tela ''' self.maca.mostrar() ''' Atualiza toda a tela com todos os", "self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca] self.direcao = \"\"", "False self.noMenu = True self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y =", "que é a posição da maçã na tela ''' class Maca: def __init__(self):", "cobra, não recebe parâmetros ''' # noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando", "2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura - placar - 2, largura,", "= True self.perdeu = False self.noMenu = False self.modo = \"classico\" self.iniciar() if", "Método mostrar, desenha a maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x,", "= 0 self.pontos = 0 if 193 < mouse_x < 193 + 279", "self.texto = self.font.render(msg, True, cor) ''' Método mostrar desenha na tela o texto", "a cobra comeu ela mesma, caso tenha comido o jogo é definido perdido,", "self.jogando = 
False self.perdeu = False self.noMenu = True self.modo = None self.fundo", "if self.fundo == branco: # self.fundo = preto # else: # self.fundo =", "279, 58]) pygame.draw.rect(fundo, preto, [195, 270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco,", "y] self.cobra.append([x, y]) ''' Método cresce, aumenta o comprimento da cobra ''' def", "[XY[0], XY[1], tamanho, tamanho]) indice += 1 ''' Método rastro, remove a cauda", "em todo o código, altura e largura da tela, tamanho da cobra e", "não recebe parâmetros ''' # noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando =", "True self.perdeu = False self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y =", "58: self.jogando = True self.perdeu = False self.pos_x = randrange(0, largura - tamanho,", "os elementos do objeto cobra, como cabeça, comprimento e direção, bem como o", "da cobra ''' def rastro(self): if len(self.cobra) > self.comp: del self.cobra[0] ''' Método", "continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270,", "cada pedaço da cobra, recebe as coordenadas x e y como parâmetro, que", "self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' Método iniciar, possui o loop", "self.pos_y = 0 if self.pos_y < 0: self.pos_y = altura - tamanho -", "método \"perdido\" ''' if self.modo == \"livre\": if self.pos_x + tamanho > largura:", "''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279,", "que serão exibidos nas telas do jogo, recebe a mensagem a cor e", "fundo.fill(self.fundo) ''' Checa para qual direção a cobra está seguindo e redefine a", "def perdido(self): while self.perdeu: ''' Iterador de eventos, todos os eventos que acontecem", "do jogo, que faz absolutamente tudo que acontece no jogo ''' def iniciar(self):", "contrário, retorna falso ''' def morreu(self): if any(Bloco == self.cabeca for Bloco in", "0 self.velocidade_y = 0 self.pontos = 0 if 193 < mouse_x < 193", 
"''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 270, 275, 54])", "self.perdido() ''' Move a cobra para a nova posição que é definida como", "[x, y] self.cobra.append([x, y]) ''' Método cresce, aumenta o comprimento da cobra '''", "tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza,", "largura, 2]) if self.pos_x + tamanho > largura: self.jogando = False self.perdeu =", "mover a cobra ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando", "= True self.noMenu = False self.perdeu = False self.modo = \"livre\" self.iniciar() '''", "tamanho como parâmetros ''' class Texto: def __init__(self, msg, cor, tam): self.font =", "o metodo randrange que usaremos para gerar numeros aleatórios para as posições da", "as coordenadas x e y como parâmetro, que será o local na tela", "> largura: self.pos_x = 0 if self.pos_x < 0: self.pos_x = largura -", "if __name__ == '__main__': instancia = Jogo() instancia.menu() # Iniciando o jogo através", "= branco ''' Limpa a tela a cada novo inicio de loop '''", "Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão", "= True self.perdido() if self.pos_x < 0: self.jogando = False self.perdeu = True", "= False self.perdeu = True self.perdido() ''' Move a cobra para a nova", "perdeu o jogo ''' if self.jogando: ''' Descomente e descubra o que isso", "[self.cabeca] self.direcao = \"\" ''' Método move, recebe os parâmetro x e y,", "perder, podendo o jogador voltar a jogar ou sair do jogo ''' def", "pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE: self.pontos", "\"Snake Game\" na tela ''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) #", "um bloco de tentativa e erro para checar se o pygame foi iniciado", "''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, 
tamanho]) ''' Método reposicionar, define", "todos os valores da cobra para os valores iniciais, para caso depois de", "da janela do jogo ''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake", "(0, 0, 255) prata = (192, 192, 192) laranja = (255, 69, 0)", "coordenadas da cabeça e insere a nova cabeça no array das posições '''", "botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto,", "textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\",", "self.modo = \"classico\" self.iniciar() if 183 < mouse_x < 183 + 279 and", "o tamanho do array é maior que o comprimento da cobra ''' def", "botão de voltar ao menu de seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369,", "mostrar, desenha cada pedaço da cobra na tela ''' def mostrar(self): indice =", "a maçã na tela ''' self.maca.mostrar() ''' Atualiza toda a tela com todos", "definido que não se está mais jogando porque perdeu e é chamado o", "a maçã após ser comida pela cobra ''' def reposicionar(self): self.x = randrange(0,", "aleatórios para a maçã após ser comida pela cobra ''' def reposicionar(self): self.x", "while self.jogando: ''' Iterador de eventos, todos os eventos que acontecem durante o", "self.pos_y = randrange(0, altura - tamanho - placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar()", "self.pos_y = randrange(0, altura - tamanho - placar, 20) self.velocidade_x = 0 self.velocidade_y", "''' Limpa a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra", "a tela a cada novo inicio de loop ''' fundo.fill(self.fundo) ''' Checa para", "except: print(\"O modulo pygame não foi inicializado com sucesso\") ''' Declaração das váriaveis", "= randrange(0, altura - tamanho - placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x,", "> altura - 
placar: self.jogando = False self.perdeu = True self.perdido() if self.pos_y", "51: self.jogando = False self.perdeu = False self.noMenu = True self.pos_x = randrange(0,", "# if self.fundo == branco: # self.fundo = preto # else: # self.fundo", "cor e o tamanho como parâmetros ''' class Texto: def __init__(self, msg, cor,", "self.jogando = False self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos()", "pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para criar objetos de textop que serão", "273) ''' Atualiza a tela com todos os elementos ''' pygame.display.update() def menu(self):", "nova posição que é definida como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) '''", "cobra ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False", "comprimento da cobra ''' def cresce(self): self.comp += 1 ''' Método mostrar, desenha", "tam): self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor) ''' Método mostrar", "tamanho - placar, 20) ''' Método mostrar, desenha a maçã na tela '''", "o texto contendo a pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura - placar,", "70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela com todos os elementos ''' pygame.display.update()", "do jogo ''' if __name__ == '__main__': instancia = Jogo() instancia.menu() # Iniciando", "randrange(0, altura - tamanho - placar, 20) self.velocidade_x = 0 self.velocidade_y = 0", "recebe as coordenadas x e y como parâmetro, que será o local na", "textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final do", "cauda quando o tamanho do array é maior que o comprimento da cobra", "ser comida pela cobra ''' def reposicionar(self): self.x = randrange(0, largura - tamanho,", "mesma, se sim retorna verdadeiro, caso contrário, retorna falso ''' def morreu(self): if", "e cobra, não recebe parâmetros ''' # 
noinspection DuplicatedCode class Jogo: def __init__(self):", "< 0: self.pos_y = altura - tamanho - placar else: pygame.draw.rect(fundo, branco, [0,", "if event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\" if event.key", "qual direção a cobra está seguindo e redefine a nova posição naquela direção", "''' Classe Jogo, definirá todo o restante do jogo, como variaveis de controle", "nenhum parâmetro, possui os atributos x e y que é a posição da", "0, 255) prata = (192, 192, 192) laranja = (255, 69, 0) cinza", "+= 1 pontos_fundo += 1 self.cobra.cresce() ''' Checa se o jogador ainda não", "= (0, 150, 0) azul = (0, 0, 255) prata = (192, 192,", "na tela o texto criado pelo construtor da classe ''' def mostrar(self, x,", "como o array que contém a posição de cada pedaço da cobra, recebe", "o titulo \"Snake Game\" na tela ''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza,", "o jogo não foi fechado, bem como se nenhuma das setas foi apertada", "pygame.QUIT: self.noMenu = False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu", "Jogo: def __init__(self): self.jogando = False self.perdeu = False self.noMenu = True self.modo", "def reposicionar(self): self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0, altura", "270, 275, 47]) textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza", "''' Método rastro, remove a cauda quando o tamanho do array é maior", "das setas foi apertada para mover a cobra ''' for event in pygame.event.get():", "y] self.comp = 1 self.cobra = [self.cabeca] self.direcao = \"\" ''' Método move,", "= True self.perdeu = False self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y", "< mouse_y < 168 + 51: self.jogando = True self.perdeu = False self.noMenu", "o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo,", "tela ''' def mostrar(self): pygame.draw.rect(fundo, 
vermelho, [self.x, self.y, tamanho, tamanho]) ''' Método reposicionar,", "event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu = False", "todos os elementos que foram desenhados anteriormente ''' pygame.display.update() ''' Define o fps", "branco ''' Limpa a tela a cada novo inicio de loop ''' fundo.fill(self.fundo)", "do jogo ''' def perdido(self): while self.perdeu: ''' Iterador de eventos, todos os", "o pygame foi iniciado corretamente ''' try: pygame.init() print(\"O modulo pygame foi inicializado", "== \"direita\": self.pos_x += tamanho else: pass ''' Checa se a cobra e", "self.perdido() ''' Desenha a cobra na tela ''' self.cobra.mostrar() ''' Desenha o placar", "tentativa e erro para checar se o pygame foi iniciado corretamente ''' try:", "randrange(0, altura - tamanho - placar, 20) ''' Classe Jogo, definirá todo o", "randrange(0, altura - tamanho - placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y)", "e erro para checar se o pygame foi iniciado corretamente ''' try: pygame.init()", "True self.perdeu = False self.noMenu = False self.modo = \"classico\" self.iniciar() if 183", "def __init__(self, x, y): self.x = x self.y = y self.cabeca = [x,", "+= tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao == \"direita\":", "Limpa a tela a cada novo inicio de loop ''' fundo.fill(self.fundo) ''' Checa", "self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0]", "cores no formato RGB''' largura = 640 altura = 480 tamanho = 20", "480 tamanho = 20 placar = 40 branco = (255, 255, 255) preto", "e é chamado o método \"perdido\" ''' if self.modo == \"livre\": if self.pos_x", "array das posições ''' def move(self, x, y): self.cabeca = [x, y] self.cobra.append([x,", "if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\": 
self.cobra.direcao", "cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha", "maçã e cobra, não recebe parâmetros ''' # noinspection DuplicatedCode class Jogo: def", "recebe nenhum parâmetro, possui os atributos x e y que é a posição", "= [x, y] self.cobra.append([x, y]) ''' Método cresce, aumenta o comprimento da cobra", "self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 ''' Limpa a tela", "False self.perdeu = False self.modo = \"livre\" self.iniciar() ''' Limpa a tela '''", "pelo construtor da classe ''' def mostrar(self, x, y): fundo.blit(self.texto, [x, y]) '''", "reposicionar(self): self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0, altura -", "+ str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30) ''' Desenha a maçã na", "self.cobra.cresce() self.pontos += 1 pontos_fundo += 1 ''' Aqui primeiro é feita a", "os elementos ''' pygame.display.update() ''' Instancia do jogo ''' if __name__ == '__main__':", "- 2, largura, 2]) if self.pos_x + tamanho > largura: self.jogando = False", "self.perdeu = True self.perdido() if self.pos_y < 0: self.jogando = False self.perdeu =", "== pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if 143", "''' if self.jogando: ''' Descomente e descubra o que isso faz''' # if", "self.font.render(msg, True, cor) ''' Método mostrar desenha na tela o texto criado pelo", "parâmetros ''' class Texto: def __init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None, tam)", "= 40 branco = (255, 255, 255) preto = (0, 0, 0) vermelho", "print(\"Módulos importados com sucesso\") ''' Utilizando um bloco de tentativa e erro para", "elementos ''' pygame.display.update() def menu(self): while self.noMenu: ''' Iterador de eventos, todos os", "do array é maior que o comprimento da cobra ''' def rastro(self): if", "dela, além disso o metodo randrange que usaremos para gerar 
numeros aleatórios para", "textoContinuar.mostrar(190, 273) ''' Atualiza a tela com todos os elementos ''' pygame.display.update() '''", "clássico é checado se a cobra ultrapassou alguma das bordas, caso tenha ultrapassado", "= False self.perdeu = True self.perdido() ''' Desenha a cobra na tela '''", "tela ''' fundo.fill(branco) ''' Desenha o titulo \"Snake Game\" na tela ''' #", "preto, [185, 270, 275, 47]) textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273)", "in self.cobra[:-1]): return True return False ''' Método reinicia, redefine todos os valores", "pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura -", "se a cobra e a maçã estão na mesma posição, caso estejam, a", "168 + 51: self.jogando = True self.perdeu = False self.noMenu = False self.modo", "pontos aumenta ''' if self.pos_x == self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce()", "tela ''' self.cobra.mostrar() ''' Desenha o placar e o texto contendo a pontuação", "del self.cobra[0] ''' Método morreu, verifica se a cobra comeu ela mesma, se", "= preto # else: # self.fundo = branco ''' Limpa a tela a", "== self.cabeca for Bloco in self.cobra[:-1]): return True return False ''' Método reinicia,", "o código, altura e largura da tela, tamanho da cobra e maçã, tamanho", "tela ''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu", "caso estejam, a maçã é reposicionada, a cobra aumenta e o placar de", "self.comp: del self.cobra[0] ''' Método morreu, verifica se a cobra comeu ela mesma,", "largura - tamanho, 20) self.y = randrange(0, altura - tamanho - placar, 20)", "jogo, relógio para definir o fps, fundo para desenhar tudo do jogo e", "self.fundo = preto # else: # self.fundo = branco ''' Limpa a tela", "voltar a jogar ou sair do jogo ''' def perdido(self): while self.perdeu: '''", "todo o restante do jogo, como variaveis de controle 
para continuar jogando, perder,", "51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) textoContinuar = Texto(\"Modo Livre\", branco, 70)", "o loop principal do jogo, que faz absolutamente tudo que acontece no jogo", "Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela com todos os", "e o tamanho como parâmetros ''' class Texto: def __init__(self, msg, cor, tam):", "- placar, 20) ''' Método mostrar, desenha a maçã na tela ''' def", "= Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela com todos", "''' Atualiza toda a tela com todos os elementos que foram desenhados anteriormente", "que faz absolutamente tudo que acontece no jogo ''' def iniciar(self): pontos_fundo =", "''' fundo.fill(self.fundo) ''' Checa para qual direção a cobra está seguindo e redefine", "= randrange(0, altura - tamanho - placar, 20) ''' Método mostrar, desenha a", "jogo ''' def iniciar(self): pontos_fundo = 0 while self.jogando: ''' Iterador de eventos,", "tamanho > altura - placar: self.pos_y = 0 if self.pos_y < 0: self.pos_y", "e largura da tela, tamanho da cobra e maçã, tamanho do placar e", "do modo, caso o modo escolhido no menu tenha sido o modo livre,", "textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [193,", "cada novo inicio de loop ''' fundo.fill(self.fundo) ''' Checa para qual direção a", "192, 192) laranja = (255, 69, 0) cinza = (79, 79, 79) cinzaClaro", "variaveis de controle para continuar jogando, perder, posição e velocidade da cobra, pontos,", "tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", é verificado se o", "False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu = False if", "self.pos_x == self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo", "168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar 
= Texto(\"Voltar ao", "se retorna para o método iniciar ''' for event in pygame.event.get(): if event.type", "pygame foi iniciado corretamente ''' try: pygame.init() print(\"O modulo pygame foi inicializado com", "58]) pygame.draw.rect(fundo, preto, [195, 270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70)", "objeto maçã, não recebe nenhum parâmetro, possui os atributos x e y que", "''' pygame.display.update() def menu(self): while self.noMenu: ''' Iterador de eventos, todos os eventos", "170, 365, 47]) textoContinuar = Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) '''", "mouse_pos[1] if 143 < mouse_x < 143 + 369 and 168 < mouse_y", "altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2, 0,", "# pontos_fundo = 0 # if self.fundo == branco: # self.fundo = preto", "self.cobra.cresce() ''' Checa se o jogador ainda não perdeu o jogo ''' if", "e você poderá atravessar o mapa, mas caso tenha escolhido o modo clássico", "== pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\" if event.key == pygame.K_DOWN", "se a cobra comeu ela mesma, caso tenha comido o jogo é definido", "''' Aqui primeiro é feita a checagem do modo, caso o modo escolhido", "0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura -", "183 + 279 and 268 < mouse_y < 268 + 51: self.jogando =", "Utilizando um bloco de tentativa e erro para checar se o pygame foi", "+ 279 and 268 < mouse_y < 268 + 58: self.jogando = True", "ela mesma, se sim retorna verdadeiro, caso contrário, retorna falso ''' def morreu(self):", "maçã na tela ''' class Maca: def __init__(self): self.x = randrange(0, largura -", "''' Checa para qual direção a cobra está seguindo e redefine a nova", "= randrange(0, altura - tamanho - placar, 20) ''' Classe Jogo, definirá todo", "queira voltar, todo o jogo é redefinido e se retorna para o método", "de continuar jogando ''' 
pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145,", "= None self.fundo = preto self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y", "self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo += 1 ''' Aqui", "\"pygame.event.get()\", é verificado se o jogador quis sair do jogo ou quer voltar", "instancia = Jogo() instancia.menu() # Iniciando o jogo através da instância ''' Fecha", "self.y, tamanho, tamanho]) ''' Método reposicionar, define novos x e y aleatórios para", "self.y = randrange(0, altura - tamanho - placar, 20) ''' Método mostrar, desenha", "tamanho else: pass ''' Checa se a cobra e a maçã estão na", "369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar = Texto(\"Voltar ao Menu\",", "é a posição da maçã na tela ''' class Maca: def __init__(self): self.x", "pygame.display.update() def menu(self): while self.noMenu: ''' Iterador de eventos, todos os eventos que", "# if pontos_fundo == 10: # pontos_fundo = 0 # if self.fundo ==", "para gerar numeros aleatórios para as posições da cobra e maçã ''' import", "= True self.perdido() ''' Desenha a cobra na tela ''' self.cobra.mostrar() ''' Desenha", "pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice += 1 ''' Método rastro, remove", "placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y =", "self.jogando = True self.noMenu = False self.perdeu = False self.modo = \"livre\" self.iniciar()", "= Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30)", "fundo para desenhar tudo do jogo e o título da janela do jogo", "Limpa a tela ''' fundo.fill(branco) ''' Desenha o titulo \"Snake Game\" na tela", "a tela com todos os elementos ''' pygame.display.update() ''' Instancia do jogo '''", "a cobra para a nova posição 
que é definida como parâmetro do método", "e redefine a nova posição naquela direção ''' if self.cobra.direcao == \"cima\": self.pos_y", "Método perdido, possui o loop da tela de derrota, faz tudo que acontece", "2]) if self.pos_x + tamanho > largura: self.jogando = False self.perdeu = True", "textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela com", "''' Define o fps do jogo ''' relogio.tick(15) ''' Método perdido, possui o", "self.pos_y + tamanho > altura - placar: self.jogando = False self.perdeu = True", "loop da tela de derrota, faz tudo que acontece ao perder, podendo o", "Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela com todos os elementos", "para continuar jogando, perder, posição e velocidade da cobra, pontos, bem como são", "= 0 # if self.fundo == branco: # self.fundo = preto # else:", "= (0, 0, 255) prata = (192, 192, 192) laranja = (255, 69,", "tela de derrota, faz tudo que acontece ao perder, podendo o jogador voltar", "self.noMenu: ''' Iterador de eventos, todos os eventos que acontecem durante o tempo", "False ''' Método reinicia, redefine todos os valores da cobra para os valores", "Método mostrar, desenha cada pedaço da cobra na tela ''' def mostrar(self): indice", "quando o tamanho do array é maior que o comprimento da cobra '''", "tamanho elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x", "= 1 self.cobra = [self.cabeca] self.direcao = \"\" ''' Método move, recebe os", "self.perdido() if self.pos_y < 0: self.jogando = False self.perdeu = True self.perdido() '''", "self.velocidade_y = 0 self.pontos = 0 ''' Limpa a tela ''' fundo.fill(branco) '''", "= False self.noMenu = False self.modo = \"classico\" self.iniciar() if 183 < mouse_x", "que será o local na tela onde ela começará o jogo ''' class", "= Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", 
preto,", "+ str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos),", "altura]) pygame.draw.rect(fundo, branco, [0, altura - placar - 2, largura, 2]) if self.pos_x", "foi apertada para mover a cobra ''' for event in pygame.event.get(): if event.type", "das váriaveis globais que utilizaremos em todo o código, altura e largura da", "!= \"cima\": self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE: self.pontos += 1 pontos_fundo", "tudo que acontece no jogo ''' def iniciar(self): pontos_fundo = 0 while self.jogando:", "elementos que foram desenhados anteriormente ''' pygame.display.update() ''' Define o fps do jogo", "depois de ter perdido o jogados possa continuar jogando ''' def reinicia(self, x,", "e o título da janela do jogo ''' relogio = pygame.time.Clock() fundo =", "altura - placar - 2, largura, 2]) if self.pos_x + tamanho > largura:", "== \"cima\": self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif", "altura - placar: self.pos_y = 0 if self.pos_y < 0: self.pos_y = altura", "''' fundo.fill(branco) ''' Desenha o titulo \"Snake Game\" na tela ''' # textoPerdeuSombra", "pontos_fundo = 0 while self.jogando: ''' Iterador de eventos, todos os eventos que", "return False ''' Método reinicia, redefine todos os valores da cobra para os", "# else: # self.fundo = branco ''' Limpa a tela a cada novo", "tela o texto criado pelo construtor da classe ''' def mostrar(self, x, y):", "Maca: def __init__(self): self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0,", "''' Atualiza a tela com todos os elementos ''' pygame.display.update() def menu(self): while", "livre, o jogo não possuirá bordas e você poderá atravessar o mapa, mas", "cobra na tela ''' self.cobra.mostrar() ''' Desenha o placar e o texto contendo", "são criados os objetos maçã e cobra, não recebe parâmetros ''' # noinspection", "[193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 
270, 275, 54]) textoContinuar = Texto(\"Novo", "+ str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão de voltar ao", "self.cobra[0] ''' Método morreu, verifica se a cobra comeu ela mesma, se sim", "from random import randrange print(\"Módulos importados com sucesso\") ''' Utilizando um bloco de", "ser obtidos pelo \"pygame.event.get()\", sendo assim verificado se o jogo não foi fechado,", "143 + 369 and 168 < mouse_y < 168 + 51: self.jogando =", "Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30) ''' Desenha a maçã", "naquela direção ''' if self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif self.cobra.direcao ==", "será o local na tela onde ela começará o jogo ''' class Cobra:", "fundo.fill(branco) ''' Desenha o titulo \"Snake Game\" na tela ''' # textoPerdeuSombra =", "construtor da classe ''' def mostrar(self, x, y): fundo.blit(self.texto, [x, y]) ''' Classe", "rastro(self): if len(self.cobra) > self.comp: del self.cobra[0] ''' Método morreu, verifica se a", "fps do jogo ''' relogio.tick(15) ''' Método perdido, possui o loop da tela", "tudo do jogo e o título da janela do jogo ''' relogio =", "morreu, verifica se a cobra comeu ela mesma, se sim retorna verdadeiro, caso", "relógio para definir o fps, fundo para desenhar tudo do jogo e o", "modo, caso o modo escolhido no menu tenha sido o modo livre, o", "é feita a checagem do modo, caso o modo escolhido no menu tenha", "o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo,", "altura - tamanho - placar else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo,", "todo o código, altura e largura da tela, tamanho da cobra e maçã,", "100) ''' Desenha o botão de voltar ao menu de seleção ''' pygame.draw.rect(fundo,", "if event.type == pygame.QUIT: self.jogando = False self.perdeu = False if event.type ==", "local na tela onde ela começará o jogo ''' class Cobra: def 
__init__(self,", "''' self.maca.mostrar() ''' Atualiza toda a tela com todos os elementos que foram", "de continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185,", "True self.perdido() if self.pos_x < 0: self.jogando = False self.perdeu = True self.perdido()", "[largura - 2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura - placar -", "largura da tela, tamanho da cobra e maçã, tamanho do placar e cores", "o jogo é definido perdido, e o método \"perdido\" é chamado ''' if", "True, cor) ''' Método mostrar desenha na tela o texto criado pelo construtor", "20 placar = 40 branco = (255, 255, 255) preto = (0, 0,", "feita a checagem do modo, caso o modo escolhido no menu tenha sido", "possui os atributos x e y que é a posição da maçã na", "retorna falso ''' def morreu(self): if any(Bloco == self.cabeca for Bloco in self.cobra[:-1]):", "e a maçã estão na mesma posição, caso estejam, a maçã é reposicionada,", "''' Desenha o botão de voltar ao menu de seleção ''' pygame.draw.rect(fundo, prata,", "mouse_pos[1] if 143 < mouse_x < 143 + 359 and 168 < mouse_y", "cobra ''' def cresce(self): self.comp += 1 ''' Método mostrar, desenha cada pedaço", "preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo,", "cobra na tela ''' def mostrar(self): indice = 0 for XY in self.cobra:", "if self.pos_y + tamanho > altura - placar: self.pos_y = 0 if self.pos_y", "if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu =", "\"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if", "== pygame.QUIT: self.jogando = False self.perdeu = False if event.type == pygame.KEYDOWN: if", "a nova posição naquela direção ''' if self.cobra.direcao == \"cima\": self.pos_y -= tamanho", "if event.type == pygame.QUIT: self.jogando = False break if event.type == pygame.KEYDOWN: if", "tamanho > altura - 
placar: self.jogando = False self.perdeu = True self.perdido() if", "com sucesso\") ''' Utilizando um bloco de tentativa e erro para checar se", "na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho]) ''' Método", "Desenha a maçã na tela ''' self.maca.mostrar() ''' Atualiza toda a tela com", "e maçã, tamanho do placar e cores no formato RGB''' largura = 640", "+= 1 ''' Método mostrar, desenha cada pedaço da cobra na tela '''", "para qual direção a cobra está seguindo e redefine a nova posição naquela", "modo escolhido no menu tenha sido o modo livre, o jogo não possuirá", "valores iniciais, para caso depois de ter perdido o jogados possa continuar jogando", "class Jogo: def __init__(self): self.jogando = False self.perdeu = False self.noMenu = True", "Game\" na tela ''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108,", "220, 220) ''' Definição de configurações do jogo, relógio para definir o fps,", "contém a posição de cada pedaço da cobra, recebe as coordenadas x e", "jogo, que faz absolutamente tudo que acontece no jogo ''' def iniciar(self): pontos_fundo", "self.iniciar() ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha o titulo \"Snake Game\"", "criado pelo construtor da classe ''' def mostrar(self, x, y): fundo.blit(self.texto, [x, y])", "o tamanho como parâmetros ''' class Texto: def __init__(self, msg, cor, tam): self.font", "Método morreu, verifica se a cobra comeu ela mesma, se sim retorna verdadeiro,", "self.comp += 1 ''' Método mostrar, desenha cada pedaço da cobra na tela", "= (79, 79, 79) cinzaClaro = (220, 220, 220) ''' Definição de configurações", "self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho", "mais jogando porque perdeu e é chamado o método \"perdido\" ''' if self.modo", "vermelho = (255, 0, 0) verde = (0, 200, 0) verde_escuro = (0,", "array que contém a posição de cada pedaço da cobra, 
recebe as coordenadas", "+ tamanho > largura: self.pos_x = 0 if self.pos_x < 0: self.pos_x =", "ser obtidos pelo \"pygame.event.get()\", é verificado se o jogador quis sair do jogo", "369 and 168 < mouse_y < 168 + 51: self.jogando = False self.perdeu", "tamanho, 20) self.pos_y = randrange(0, altura - tamanho - placar, 20) self.velocidade_x =", "cobra definirá os elementos do objeto cobra, como cabeça, comprimento e direção, bem", "o jogo não possuirá bordas e você poderá atravessar o mapa, mas caso", "tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho,", "51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70)", "possui o loop principal do jogo, que faz absolutamente tudo que acontece no", "acontece ao perder, podendo o jogador voltar a jogar ou sair do jogo", "''' Limpa o rastro deixado pelo blocos adicionais ''' self.cobra.rastro() ''' Checa se", "self.modo = \"livre\" self.iniciar() ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha o", "da cobra para os valores iniciais, para caso depois de ter perdido o", "os elementos que foram desenhados anteriormente ''' pygame.display.update() ''' Define o fps do", "= False self.noMenu = True self.modo = None self.fundo = preto self.pos_x =", "== len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde,", "se o jogador ainda não perdeu o jogo ''' if self.jogando: ''' Descomente", "sair do jogo ''' def perdido(self): while self.perdeu: ''' Iterador de eventos, todos", "= randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura - tamanho -", "Classe maçã que definirá o objeto maçã, não recebe nenhum parâmetro, possui os", "placar, 20) ''' Classe Jogo, definirá todo o restante do jogo, como variaveis", "de pontos aumenta ''' if self.pos_x == self.maca.x and self.pos_y == self.maca.y: 
self.maca.reposicionar()", "self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 ''' Limpa a", "do jogo, relógio para definir o fps, fundo para desenhar tudo do jogo", "nenhuma das setas foi apertada para mover a cobra ''' for event in", "direção ''' if self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\":", "Instancia do jogo ''' if __name__ == '__main__': instancia = Jogo() instancia.menu() #", "= False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y", "< 168 + 51: self.jogando = True self.perdeu = False self.noMenu = False", "\"cima\": self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE: self.pontos += 1 pontos_fundo +=", "\"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\" if", "= randrange(0, altura - tamanho - placar, 20) self.velocidade_x = 0 self.velocidade_y =", "e o placar de pontos aumenta ''' if self.pos_x == self.maca.x and self.pos_y", "Método rastro, remove a cauda quando o tamanho do array é maior que", "100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) '''", "''' class Texto: def __init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto", "pygame.QUIT: self.jogando = False break if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT", "jogo ''' relogio.tick(15) ''' Método perdido, possui o loop da tela de derrota,", "preto, [145, 170, 365, 47]) textoContinuar = Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150,", "posição, caso estejam, a maçã é reposicionada, a cobra aumenta e o placar", "and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao", "podendo o jogador voltar a jogar ou sair do jogo ''' def perdido(self):", "caso contrário, retorna falso ''' def morreu(self): if 
any(Bloco == self.cabeca for Bloco", "pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\" if event.key == pygame.K_RIGHT and", "pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho])", "novos x e y aleatórios para a maçã após ser comida pela cobra", "juntamente do modulo locals dela, além disso o metodo randrange que usaremos para", "podem ser obtidos pelo \"pygame.event.get()\", sendo assim verificado se o jogo não foi", "> altura - placar: self.pos_y = 0 if self.pos_y < 0: self.pos_y =", "posições ''' def move(self, x, y): self.cabeca = [x, y] self.cobra.append([x, y]) '''", "Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110,", "pedaço da cobra na tela ''' def mostrar(self): indice = 0 for XY", "255, 255) preto = (0, 0, 0) vermelho = (255, 0, 0) verde", "str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos), prata,", "event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu = False if event.type ==", "x, y): self.cabeca = [x, y] self.cobra.append([x, y]) ''' Método cresce, aumenta o", "os parâmetro x e y, que serão as novas coordenadas da cabeça e", "definido perdido, e o método \"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando =", "\"\" ''' Método move, recebe os parâmetro x e y, que serão as", "Classe texto servirá para criar objetos de textop que serão exibidos nas telas", "jogando ''' pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355,", "''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao", "1 self.cobra = [self.cabeca] ''' Classe maçã que definirá o objeto maçã, não", "está seguindo e redefine a nova posição naquela 
direção ''' if self.cobra.direcao ==", "azul = (0, 0, 255) prata = (192, 192, 192) laranja = (255,", "aumenta ''' if self.pos_x == self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos", "a maçã estão na mesma posição, caso estejam, a maçã é reposicionada, a", "import randrange print(\"Módulos importados com sucesso\") ''' Utilizando um bloco de tentativa e", "de loop ''' fundo.fill(self.fundo) ''' Checa para qual direção a cobra está seguindo", "Desenha a cobra na tela ''' self.cobra.mostrar() ''' Desenha o placar e o", "eventos, todos os eventos que acontecem durante o tempo de execução estão podem", "como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado pelo", "268 < mouse_y < 268 + 58: self.jogando = True self.perdeu = False", "in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False break if event.type ==", "inicializado com sucesso\") ''' Declaração das váriaveis globais que utilizaremos em todo o", "iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False", "self.pos_x < 0: self.jogando = False self.perdeu = True self.perdido() if self.pos_y +", "método iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu =", "prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) textoContinuar =", "que acontece no jogo ''' def iniciar(self): pontos_fundo = 0 while self.jogando: '''", "recebe parâmetros ''' # noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando = False", "= 0 if self.pos_x < 0: self.pos_x = largura - tamanho if self.pos_y", "porque perdeu e é chamado o método \"perdido\" ''' if self.modo == \"livre\":", "textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [183,", "\"direita\": self.pos_x += tamanho else: pass ''' Checa se a cobra e a", "self.pos_y = altura - tamanho - placar else: 
pygame.draw.rect(fundo, branco, [0, 0, 2,", "a checagem do modo, caso o modo escolhido no menu tenha sido o", "# self.fundo = branco ''' Limpa a tela a cada novo inicio de", "= 0 self.velocidade_y = 0 self.pontos = 0 if 193 < mouse_x <", "100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata,", "54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela", "jogados possa continuar jogando ''' def reinicia(self, x, y): self.x = x self.y", "False self.modo = \"livre\" self.iniciar() ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha", "self.perdido() if self.pos_y + tamanho > altura - placar: self.jogando = False self.perdeu", "é definido perdido, e o método \"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando", "+= 1 self.cobra.cresce() ''' Checa se o jogador ainda não perdeu o jogo", "comprimento da cobra ''' def rastro(self): if len(self.cobra) > self.comp: del self.cobra[0] '''", "if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1]", "\"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando = False self.perdeu = True self.perdido()", "self.pos_y) self.maca = Maca() ''' Método iniciar, possui o loop principal do jogo,", "99) textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) '''", "menu tenha sido o modo livre, o jogo não possuirá bordas e você", "tenha ultrapassado é definido que não se está mais jogando porque perdeu e", "self.jogando: ''' Iterador de eventos, todos os eventos que acontecem durante o tempo", "event.key == pygame.K_ESCAPE: self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos()", "1 self.cobra.cresce() ''' Checa se o jogador ainda não perdeu o jogo '''", "foi inicializado com sucesso\") except: print(\"O modulo pygame não foi inicializado com sucesso\")", "def 
mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho]) ''' Método reposicionar, define novos", "while self.noMenu: ''' Iterador de eventos, todos os eventos que acontecem durante o", "verde = (0, 200, 0) verde_escuro = (0, 150, 0) azul = (0,", "20) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 self.cobra = Cobra(self.pos_x,", "com sucesso\") ''' Declaração das váriaveis globais que utilizaremos em todo o código,", "self.perdeu = False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando =", "Atualiza a tela com todos os elementos ''' pygame.display.update() def menu(self): while self.noMenu:", "Cobra: def __init__(self, x, y): self.x = x self.y = y self.cabeca =", "os valores iniciais, para caso depois de ter perdido o jogados possa continuar", "''' Método reinicia, redefine todos os valores da cobra para os valores iniciais,", "retorna verdadeiro, caso contrário, retorna falso ''' def morreu(self): if any(Bloco == self.cabeca", "self.pontos += 1 pontos_fundo += 1 ''' Aqui primeiro é feita a checagem", "275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a", "branco, 25) textoPlacar.mostrar(10, altura - 30) ''' Desenha a maçã na tela '''", "textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação", "self.iniciar() if 183 < mouse_x < 183 + 279 and 268 < mouse_y", "0) vermelho = (255, 0, 0) verde = (0, 200, 0) verde_escuro =", "\"livre\": if self.pos_x + tamanho > largura: self.pos_x = 0 if self.pos_x <", "self.fundo == branco: # self.fundo = preto # else: # self.fundo = branco", "botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto,", "for event in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu = False if event.type", "e y aleatórios para a maçã após ser comida pela cobra ''' def", "else: # 
self.fundo = branco ''' Limpa a tela a cada novo inicio", "= mouse_pos[1] if 143 < mouse_x < 143 + 359 and 168 <", "- tamanho - placar, 20) ''' Classe Jogo, definirá todo o restante do", "erro para checar se o pygame foi iniciado corretamente ''' try: pygame.init() print(\"O", "sendo assim verificado se o jogo não foi fechado, bem como se nenhuma", "80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a", "51: self.jogando = True self.noMenu = False self.perdeu = False self.modo = \"livre\"", "faz''' # if pontos_fundo == 10: # pontos_fundo = 0 # if self.fundo", "branco: # self.fundo = preto # else: # self.fundo = branco ''' Limpa", "1 self.cobra = [self.cabeca] self.direcao = \"\" ''' Método move, recebe os parâmetro", "o array que contém a posição de cada pedaço da cobra, recebe as", "tamanho - placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0", "aumenta e o placar de pontos aumenta ''' if self.pos_x == self.maca.x and", "79) cinzaClaro = (220, 220, 220) ''' Definição de configurações do jogo, relógio", "velocidade da cobra, pontos, bem como são criados os objetos maçã e cobra,", "método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado pelo blocos adicionais '''", "pygame.display.update() ''' Instancia do jogo ''' if __name__ == '__main__': instancia = Jogo()", "- 2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura - placar - 2,", "placar: self.jogando = False self.perdeu = True self.perdido() if self.pos_y < 0: self.jogando", "pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if 143 <", "mouse_y < 268 + 58: self.jogando = True self.perdeu = False self.pos_x =", "- placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y", "''' Desenha o 
botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279,", "como parâmetros ''' class Texto: def __init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None,", "usaremos para gerar numeros aleatórios para as posições da cobra e maçã '''", "mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x", "jogando ''' def reinicia(self, x, y): self.x = x self.y = y self.cabeca", "< 0: self.jogando = False self.perdeu = True self.perdido() if self.pos_y + tamanho", "pontos_fundo = 0 # if self.fundo == branco: # self.fundo = preto #", "143 < mouse_x < 143 + 369 and 168 < mouse_y < 168", "falso ''' def morreu(self): if any(Bloco == self.cabeca for Bloco in self.cobra[:-1]): return", "= False self.perdeu = True self.perdido() if self.pos_y + tamanho > altura -", "redefinido e se retorna para o método iniciar ''' for event in pygame.event.get():", "if self.jogando: ''' Descomente e descubra o que isso faz''' # if pontos_fundo", "ao perder, podendo o jogador voltar a jogar ou sair do jogo '''", "textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100)", "com todos os elementos ''' pygame.display.update() ''' Instancia do jogo ''' if __name__", "relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá", "Método reinicia, redefine todos os valores da cobra para os valores iniciais, para", "que é definida como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o", "= Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\"", "''' def cresce(self): self.comp += 1 ''' Método mostrar, desenha cada pedaço da", "textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o", "1 
pontos_fundo += 1 self.cobra.cresce() ''' Checa se o jogador ainda não perdeu", "self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao !=", "todos os eventos que acontecem durante o tempo de execução estão podem ser", "False self.noMenu = True self.modo = None self.fundo = preto self.pos_x = randrange(0,", "continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 270,", "e cores no formato RGB''' largura = 640 altura = 480 tamanho =", "self.jogando = False self.perdeu = True self.perdido() if self.pos_x < 0: self.jogando =", "2]) pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura", "= 20 placar = 40 branco = (255, 255, 255) preto = (0,", "self.cobra.morreu(): self.jogando = False self.perdeu = True self.perdido() ''' Desenha a cobra na", "20) ''' Classe Jogo, definirá todo o restante do jogo, como variaveis de", "0 self.velocidade_y = 0 self.pontos = 0 ''' Limpa a tela ''' fundo.fill(branco)", "0, 0) vermelho = (255, 0, 0) verde = (0, 200, 0) verde_escuro", "- tamanho, 20) self.y = randrange(0, altura - tamanho - placar, 20) '''", "sido o modo livre, o jogo não possuirá bordas e você poderá atravessar", "= 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' Método iniciar, possui", "0 while self.jogando: ''' Iterador de eventos, todos os eventos que acontecem durante", "escolhido no menu tenha sido o modo livre, o jogo não possuirá bordas", "= pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor) ''' Método mostrar desenha na", "pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\" if event.key == pygame.K_DOWN and", "o comprimento da cobra ''' def rastro(self): if len(self.cobra) > self.comp: del self.cobra[0]", "telas do jogo, recebe a mensagem a cor e o tamanho como parâmetros", "ultrapassou alguma das bordas, caso tenha 
ultrapassado é definido que não se está", "tamanho, 20) self.y = randrange(0, altura - tamanho - placar, 20) ''' Classe", "+= 1 ''' Método rastro, remove a cauda quando o tamanho do array", "descubra o que isso faz''' # if pontos_fundo == 10: # pontos_fundo =", "None self.fundo = preto self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y =", "1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho,", "= False self.modo = \"classico\" self.iniciar() if 183 < mouse_x < 183 +", "desenhar tudo do jogo e o título da janela do jogo ''' relogio", "restante do jogo, como variaveis de controle para continuar jogando, perder, posição e", "self.fundo = branco ''' Limpa a tela a cada novo inicio de loop", "tamanho if self.pos_y + tamanho > altura - placar: self.pos_y = 0 if", "o método \"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando = False self.perdeu =", "pygame.K_SPACE: self.pontos += 1 pontos_fundo += 1 self.cobra.cresce() ''' Checa se o jogador", "0, 0) verde = (0, 200, 0) verde_escuro = (0, 150, 0) azul", "''' self.cobra.mostrar() ''' Desenha o placar e o texto contendo a pontuação atual", "contendo a pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura - placar, largura, placar])", "self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' Método iniciar,", "novo inicio de loop ''' fundo.fill(self.fundo) ''' Checa para qual direção a cobra", "prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar =", "servirá para criar objetos de textop que serão exibidos nas telas do jogo,", "''' class Maca: def __init__(self): self.x = randrange(0, largura - tamanho, 20) self.y", "class Cobra: def __init__(self, x, y): self.x = x self.y = y self.cabeca", "maçã após ser comida pela cobra ''' def reposicionar(self): self.x = randrange(0, largura", "False self.modo = \"classico\" self.iniciar() if 183 < mouse_x < 
183 + 279", "if self.pos_x + tamanho > largura: self.jogando = False self.perdeu = True self.perdido()", "a pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura - placar, largura, placar]) textoPlacarSombra", "tamanho da cobra e maçã, tamanho do placar e cores no formato RGB'''", "é maior que o comprimento da cobra ''' def rastro(self): if len(self.cobra) >", "self.noMenu = True self.modo = None self.fundo = preto self.pos_x = randrange(0, largura", "placar = 40 branco = (255, 255, 255) preto = (0, 0, 0)", "de controle para continuar jogando, perder, posição e velocidade da cobra, pontos, bem", "if event.key == pygame.K_ESCAPE: self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos =", "desenha na tela o texto criado pelo construtor da classe ''' def mostrar(self,", "self.perdeu = True self.perdido() ''' Move a cobra para a nova posição que", "170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha", "Definição de configurações do jogo, relógio para definir o fps, fundo para desenhar", "que usaremos para gerar numeros aleatórios para as posições da cobra e maçã", "cabeça, comprimento e direção, bem como o array que contém a posição de", "def __init__(self): self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0, altura", "if self.cobra.morreu(): self.jogando = False self.perdeu = True self.perdido() ''' Desenha a cobra", "# noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando = False self.perdeu = False", "largura: self.jogando = False self.perdeu = True self.perdido() if self.pos_x < 0: self.jogando", "pygame foi inicializado com sucesso\") except: print(\"O modulo pygame não foi inicializado com", "def __init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True,", "tamanho do array é maior que o comprimento da cobra ''' def rastro(self):", "largura - tamanho, 20) self.pos_y = randrange(0, altura - 
tamanho - placar, 20)", "janela do jogo ''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\")", "cobra comeu ela mesma, se sim retorna verdadeiro, caso contrário, retorna falso '''", "for Bloco in self.cobra[:-1]): return True return False ''' Método reinicia, redefine todos", "event.key == pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\" if event.key ==", "+ tamanho > altura - placar: self.jogando = False self.perdeu = True self.perdido()", "= False self.noMenu = True self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y", "= 480 tamanho = 20 placar = 40 branco = (255, 255, 255)", "- placar, 20) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 self.cobra", "''' pygame.display.update() ''' Define o fps do jogo ''' relogio.tick(15) ''' Método perdido,", "tela com todos os elementos ''' pygame.display.update() ''' Instancia do jogo ''' if", "retorna para o método iniciar ''' for event in pygame.event.get(): if event.type ==", "''' Declaração das váriaveis globais que utilizaremos em todo o código, altura e", "self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE: self.pontos += 1 pontos_fundo += 1", "pygame.draw.rect(fundo, preto, [195, 270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210,", "def reinicia(self, x, y): self.x = x self.y = y self.cabeca = [x,", "na tela ''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28)", "tenha sido o modo livre, o jogo não possuirá bordas e você poderá", "placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura -", "+= 1 ''' Aqui primeiro é feita a checagem do modo, caso o", "event.key == pygame.K_SPACE: self.pontos += 1 pontos_fundo += 1 self.cobra.cresce() ''' Checa se", "se a cobra ultrapassou alguma das bordas, caso tenha ultrapassado é 
definido que", "50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180,", "pass ''' Checa se a cobra e a maçã estão na mesma posição,", "tamanho > largura: self.jogando = False self.perdeu = True self.perdido() if self.pos_x <", "pygame.draw.rect(fundo, branco, [0, altura - placar - 2, largura, 2]) if self.pos_x +", "= [x, y] self.comp = 1 self.cobra = [self.cabeca] ''' Classe maçã que", "verifica se a cobra comeu ela mesma, se sim retorna verdadeiro, caso contrário,", "isso faz''' # if pontos_fundo == 10: # pontos_fundo = 0 # if", "o modo escolhido no menu tenha sido o modo livre, o jogo não", "+ 51: self.jogando = False self.perdeu = False self.noMenu = True self.pos_x =", "\" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: \" +", "rastro deixado pelo blocos adicionais ''' self.cobra.rastro() ''' Checa se a cobra comeu", "self.direcao = \"\" ''' Método move, recebe os parâmetro x e y, que", "poderá atravessar o mapa, mas caso tenha escolhido o modo clássico é checado", "elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -=", "[x, y] self.comp = 1 self.cobra = [self.cabeca] self.direcao = \"\" ''' Método", "self.cabeca for Bloco in self.cobra[:-1]): return True return False ''' Método reinicia, redefine", "fechado, bem como se nenhuma das setas foi apertada para mover a cobra", "tamanho]) indice += 1 ''' Método rastro, remove a cauda quando o tamanho", "largura - tamanho if self.pos_y + tamanho > altura - placar: self.pos_y =", "textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha", "Game\") ''' Classe texto servirá para criar objetos de textop que serão exibidos", "tam) self.texto = self.font.render(msg, True, cor) ''' Método mostrar desenha na tela o", "''' pygame.draw.rect(fundo, 
branco, [0, altura - placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" +", "while self.perdeu: ''' Iterador de eventos, todos os eventos que acontecem durante o", "= [self.cabeca] self.direcao = \"\" ''' Método move, recebe os parâmetro x e", "False self.perdeu = True self.perdido() ''' Desenha a cobra na tela ''' self.cobra.mostrar()", "''' Método mostrar, desenha cada pedaço da cobra na tela ''' def mostrar(self):", "\"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu =", "640 altura = 480 tamanho = 20 placar = 40 branco = (255,", "cabeça no array das posições ''' def move(self, x, y): self.cabeca = [x,", "def cresce(self): self.comp += 1 ''' Método mostrar, desenha cada pedaço da cobra", "\"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0", "''' Instancia do jogo ''' if __name__ == '__main__': instancia = Jogo() instancia.menu()", "atual ''' pygame.draw.rect(fundo, branco, [0, altura - placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\"", "= \"baixo\" if event.key == pygame.K_SPACE: self.pontos += 1 pontos_fundo += 1 self.cobra.cresce()", "cobra para a nova posição que é definida como parâmetro do método '''", "= False self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x", "0 self.pontos = 0 if 193 < mouse_x < 193 + 279 and", "self.jogando = True self.perdeu = False self.noMenu = False self.modo = \"classico\" self.iniciar()", "acontece no jogo ''' def iniciar(self): pontos_fundo = 0 while self.jogando: ''' Iterador", "e se retorna para o método iniciar ''' for event in pygame.event.get(): if", "self.pos_x += tamanho else: pass ''' Checa se a cobra e a maçã", "o placar de pontos aumenta ''' if self.pos_x == self.maca.x and self.pos_y ==", "quis sair do jogo ou quer voltar a jogar, caso queira voltar, todo", "jogo, como variaveis de 
controle para continuar jogando, perder, posição e velocidade da", "[143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar = Texto(\"Voltar", "= randrange(0, largura - tamanho, 20) self.y = randrange(0, altura - tamanho -", "self.perdeu: ''' Iterador de eventos, todos os eventos que acontecem durante o tempo", "Checa se a cobra comeu ela mesma, caso tenha comido o jogo é", "e velocidade da cobra, pontos, bem como são criados os objetos maçã e", "de continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195,", "\"direita\" if event.key == pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao = \"cima\" if", "verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice", "(255, 0, 0) verde = (0, 200, 0) verde_escuro = (0, 150, 0)", "que acontecem durante o tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\",", "''' Classe texto servirá para criar objetos de textop que serão exibidos nas", "Método iniciar, possui o loop principal do jogo, que faz absolutamente tudo que", "pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\" if", "placar: self.pos_y = 0 if self.pos_y < 0: self.pos_y = altura - tamanho", "o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo,", "70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela com todos os elementos ''' pygame.display.update()", "execução estão podem ser obtidos pelo \"pygame.event.get()\", sendo assim verificado se o jogo", "< 268 + 58: self.jogando = True self.perdeu = False self.pos_x = randrange(0,", "self.cobra.direcao = \"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao =", "''' def morreu(self): if any(Bloco == self.cabeca for Bloco in self.cobra[:-1]): return True", "''' Atualiza a 
tela com todos os elementos ''' pygame.display.update() ''' Instancia do", "bordas e você poderá atravessar o mapa, mas caso tenha escolhido o modo", "definirá o objeto maçã, não recebe nenhum parâmetro, possui os atributos x e", "x self.y = y self.cabeca = [x, y] self.comp = 1 self.cobra =", "= 0 self.velocidade_y = 0 self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca", "o jogados possa continuar jogando ''' def reinicia(self, x, y): self.x = x", "posição e velocidade da cobra, pontos, bem como são criados os objetos maçã", "if event.key == pygame.K_SPACE: self.pontos += 1 pontos_fundo += 1 self.cobra.cresce() ''' Checa", "a jogar ou sair do jogo ''' def perdido(self): while self.perdeu: ''' Iterador", "268 + 58: self.jogando = True self.perdeu = False self.pos_x = randrange(0, largura", "cobra, como cabeça, comprimento e direção, bem como o array que contém a", "após ser comida pela cobra ''' def reposicionar(self): self.x = randrange(0, largura -", "0: self.pos_x = largura - tamanho if self.pos_y + tamanho > altura -", "placar de pontos aumenta ''' if self.pos_x == self.maca.x and self.pos_y == self.maca.y:", "Limpa o rastro deixado pelo blocos adicionais ''' self.cobra.rastro() ''' Checa se a", "== self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo += 1 ''' Aqui primeiro", "deixado pelo blocos adicionais ''' self.cobra.rastro() ''' Checa se a cobra comeu ela", "mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x < 143 + 369 and", "a cauda quando o tamanho do array é maior que o comprimento da", "que o comprimento da cobra ''' def rastro(self): if len(self.cobra) > self.comp: del", "= False self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura", "mouse_y = mouse_pos[1] if 143 < mouse_x < 143 + 369 and 168", "\"pygame.event.get()\", sendo assim verificado se o jogo não foi fechado, bem como se", "y aleatórios para a maçã após ser comida pela cobra ''' def reposicionar(self):", 
"objetos maçã e cobra, não recebe parâmetros ''' # noinspection DuplicatedCode class Jogo:", "\"direita\": self.cobra.direcao = \"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao", "''' relogio.tick(15) ''' Método perdido, possui o loop da tela de derrota, faz", "pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho]) ''' Método reposicionar, define novos x e", "\"esquerda\": self.cobra.direcao = \"direita\" if event.key == pygame.K_UP and self.cobra.direcao != \"baixo\": self.cobra.direcao", "Move a cobra para a nova posição que é definida como parâmetro do", "def mostrar(self, x, y): fundo.blit(self.texto, [x, y]) ''' Classe cobra definirá os elementos", "anteriormente ''' pygame.display.update() ''' Define o fps do jogo ''' relogio.tick(15) ''' Método", "Bloco in self.cobra[:-1]): return True return False ''' Método reinicia, redefine todos os", "70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata,", "self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca] ''' Classe maçã", "- tamanho if self.pos_y + tamanho > altura - placar: self.pos_y = 0", "pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190,", "que isso faz''' # if pontos_fundo == 10: # pontos_fundo = 0 #", "- tamanho, 20) self.pos_y = randrange(0, altura - tamanho - placar, 20) self.velocidade_x", "que definirá o objeto maçã, não recebe nenhum parâmetro, possui os atributos x", "in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu = False if event.type == pygame.KEYDOWN:", "que utilizaremos em todo o código, altura e largura da tela, tamanho da", "de execução estão podem ser obtidos pelo \"pygame.event.get()\", sendo assim verificado se o", "do jogo ''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") '''", 
"self.jogando = False self.perdeu = True self.perdido() if self.pos_y < 0: self.jogando =", "Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando", "''' Desenha a pontuação final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \"", "jogo ou quer voltar a jogar, caso queira voltar, todo o jogo é", "''' Desenha a maçã na tela ''' self.maca.mostrar() ''' Atualiza toda a tela", "self.perdeu = True self.perdido() if self.pos_x < 0: self.jogando = False self.perdeu =", "prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar =", "posição da maçã na tela ''' class Maca: def __init__(self): self.x = randrange(0,", "Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar", "a cobra está seguindo e redefine a nova posição naquela direção ''' if", "é redefinido e se retorna para o método iniciar ''' for event in", "a posição de cada pedaço da cobra, recebe as coordenadas x e y", "textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura", "fundo.fill(branco) ''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159,", "self.y = y self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca]", "True self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura -", "self.perdeu = False self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0,", "e y que é a posição da maçã na tela ''' class Maca:", "self.pos_x -= tamanho elif self.cobra.direcao == \"direita\": self.pos_x += tamanho else: pass '''", "# self.fundo = preto # else: # self.fundo = branco ''' Limpa a", "textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de", "''' Move a cobra para a nova posição que é definida como parâmetro", 
"Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela com todos os", "''' class Cobra: def __init__(self, x, y): self.x = x self.y = y", "if len(self.cobra) > self.comp: del self.cobra[0] ''' Método morreu, verifica se a cobra", "jogo ''' if self.jogando: ''' Descomente e descubra o que isso faz''' #", "textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão de", "tela, tamanho da cobra e maçã, tamanho do placar e cores no formato", "para caso depois de ter perdido o jogados possa continuar jogando ''' def", "30) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [143, 168,", "a cobra e a maçã estão na mesma posição, caso estejam, a maçã", "''' Método morreu, verifica se a cobra comeu ela mesma, se sim retorna", "Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", preto, 100)", "textoPerdeu.mostrar(110, 30) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [143,", "if self.pos_y < 0: self.pos_y = altura - tamanho - placar else: pygame.draw.rect(fundo,", "''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47])", "self.cobra.direcao = \"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao =", "parâmetro, possui os atributos x e y que é a posição da maçã", "cobra ultrapassou alguma das bordas, caso tenha ultrapassado é definido que não se", "com sucesso\") except: print(\"O modulo pygame não foi inicializado com sucesso\") ''' Declaração", "255) prata = (192, 192, 192) laranja = (255, 69, 0) cinza =", "''' # noinspection DuplicatedCode class Jogo: def __init__(self): self.jogando = False self.perdeu =", "False self.perdeu = False self.noMenu = True self.modo = None self.fundo = preto", "x e y aleatórios para a maçã após ser comida pela cobra '''", "nas telas do jogo, recebe a mensagem a cor e 
o tamanho como", "Maca() ''' Método iniciar, possui o loop principal do jogo, que faz absolutamente", "cobra, pontos, bem como são criados os objetos maçã e cobra, não recebe", "= Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de", "print(\"O modulo pygame foi inicializado com sucesso\") except: print(\"O modulo pygame não foi", "''' Definição de configurações do jogo, relógio para definir o fps, fundo para", "pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) textoContinuar", "define novos x e y aleatórios para a maçã após ser comida pela", "= (192, 192, 192) laranja = (255, 69, 0) cinza = (79, 79,", "''' Método cresce, aumenta o comprimento da cobra ''' def cresce(self): self.comp +=", "a cor e o tamanho como parâmetros ''' class Texto: def __init__(self, msg,", "a maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho])", "sair do jogo ou quer voltar a jogar, caso queira voltar, todo o", "a posição da maçã na tela ''' class Maca: def __init__(self): self.x =", "e o método \"perdido\" é chamado ''' if self.cobra.morreu(): self.jogando = False self.perdeu", "if pontos_fundo == 10: # pontos_fundo = 0 # if self.fundo == branco:", "o método iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando", "mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x < 143 +", "return True return False ''' Método reinicia, redefine todos os valores da cobra", "a tela ''' fundo.fill(branco) ''' Desenha o titulo \"Snake Game\" na tela '''", "na tela onde ela começará o jogo ''' class Cobra: def __init__(self, x,", "def menu(self): while self.noMenu: ''' Iterador de eventos, todos os eventos que acontecem", "168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\",", "tela ''' self.maca.mostrar() ''' Atualiza toda a tela com todos os elementos que", 
"textoContinuar.mostrar(210, 273) ''' Atualiza a tela com todos os elementos ''' pygame.display.update() def", "para a maçã após ser comida pela cobra ''' def reposicionar(self): self.x =", "DuplicatedCode class Jogo: def __init__(self): self.jogando = False self.perdeu = False self.noMenu =", "< 168 + 51: self.jogando = False self.perdeu = False self.noMenu = True", "posição que é definida como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa", "= True self.perdido() ''' Move a cobra para a nova posição que é", "''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47])", "if 143 < mouse_x < 143 + 369 and 168 < mouse_y <", "pontos_fundo += 1 ''' Aqui primeiro é feita a checagem do modo, caso", "''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100) # textoPerdeuSombra.mostrar(108, 28) textoPerdeu =", "estão na mesma posição, caso estejam, a maçã é reposicionada, a cobra aumenta", "self.noMenu = False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu =", "apertada para mover a cobra ''' for event in pygame.event.get(): if event.type ==", "''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado pelo blocos adicionais ''' self.cobra.rastro()", "texto criado pelo construtor da classe ''' def mostrar(self, x, y): fundo.blit(self.texto, [x,", "pontuação final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos), cinza,", "cresce, aumenta o comprimento da cobra ''' def cresce(self): self.comp += 1 '''", "ela mesma, caso tenha comido o jogo é definido perdido, e o método", "a jogar, caso queira voltar, todo o jogo é redefinido e se retorna", "mostrar, desenha a maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y,", "y self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca] ''' Classe", "código, altura e largura da tela, tamanho da cobra e maçã, tamanho do", "True self.perdido() ''' 
Desenha a cobra na tela ''' self.cobra.mostrar() ''' Desenha o", "classe ''' def mostrar(self, x, y): fundo.blit(self.texto, [x, y]) ''' Classe cobra definirá", "+ str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: \" + str(self.pontos),", "maior que o comprimento da cobra ''' def rastro(self): if len(self.cobra) > self.comp:", "self.y = randrange(0, altura - tamanho - placar, 20) ''' Classe Jogo, definirá", "event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False break if event.type", "ou quer voltar a jogar, caso queira voltar, todo o jogo é redefinido", "False break if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao !=", "[0, altura - placar - 2, largura, 2]) if self.pos_x + tamanho >", "iniciar, possui o loop principal do jogo, que faz absolutamente tudo que acontece", "''' A biblioteca pygame é importada, juntamente do modulo locals dela, além disso", "rastro, remove a cauda quando o tamanho do array é maior que o", "Checa se a cobra e a maçã estão na mesma posição, caso estejam,", "Define o fps do jogo ''' relogio.tick(15) ''' Método perdido, possui o loop", "além disso o metodo randrange que usaremos para gerar numeros aleatórios para as", "0 if self.pos_y < 0: self.pos_y = altura - tamanho - placar else:", "metodo randrange que usaremos para gerar numeros aleatórios para as posições da cobra", "XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice += 1", "= False self.perdeu = False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE:", "\"classico\" self.iniciar() if 183 < mouse_x < 183 + 279 and 268 <", "ela começará o jogo ''' class Cobra: def __init__(self, x, y): self.x =", "255) preto = (0, 0, 0) vermelho = (255, 0, 0) verde =", "''' Checa se o jogador ainda não perdeu o jogo ''' if self.jogando:", "== 10: # pontos_fundo = 0 # if self.fundo == branco: # self.fundo", "0 ''' 
Limpa a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela '''", "False self.perdeu = True self.perdido() if self.pos_x < 0: self.jogando = False self.perdeu", "a tela com todos os elementos que foram desenhados anteriormente ''' pygame.display.update() '''", "parâmetro x e y, que serão as novas coordenadas da cabeça e insere", "= True self.modo = None self.fundo = preto self.pos_x = randrange(0, largura -", "[0, altura - placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25)", "= \"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\"", "textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30) ''' Desenha", "- placar: self.jogando = False self.perdeu = True self.perdido() if self.pos_y < 0:", "event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\" if event.key ==", "é importada, juntamente do modulo locals dela, além disso o metodo randrange que", "173) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [193, 268,", "Iniciando o jogo através da instância ''' Fecha a janela principal do jogo", "chamado o método \"perdido\" ''' if self.modo == \"livre\": if self.pos_x + tamanho", "menu de seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145,", "tela ''' class Maca: def __init__(self): self.x = randrange(0, largura - tamanho, 20)", "pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para criar", "modulo pygame foi inicializado com sucesso\") except: print(\"O modulo pygame não foi inicializado", "if event.key == pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if event.key", "= 640 altura = 480 tamanho = 20 placar = 40 branco =", "event.key == pygame.K_DOWN and 
self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if event.key ==", "da cabeça e insere a nova cabeça no array das posições ''' def", "prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão de voltar ao menu de", "sucesso\") except: print(\"O modulo pygame não foi inicializado com sucesso\") ''' Declaração das", "\"baixo\": self.cobra.direcao = \"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao", "= pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x <", "self.perdeu = True self.perdido() if self.pos_y + tamanho > altura - placar: self.jogando", "serão exibidos nas telas do jogo, recebe a mensagem a cor e o", "indice += 1 ''' Método rastro, remove a cauda quando o tamanho do", "self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\" if event.key == pygame.K_UP and self.cobra.direcao !=", "== pygame.K_ESCAPE: self.jogando = False self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos", "textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação", "bem como o array que contém a posição de cada pedaço da cobra,", "event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN:", "self.jogando = False self.perdeu = False if event.type == pygame.KEYDOWN: if event.key ==", "== pygame.K_ESCAPE: self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x", "== \"livre\": if self.pos_x + tamanho > largura: self.pos_x = 0 if self.pos_x", "True return False ''' Método reinicia, redefine todos os valores da cobra para", "self.perdeu = False self.noMenu = True self.modo = None self.fundo = preto self.pos_x", "''' Método reposicionar, define novos x e y aleatórios para a maçã após", "''' pygame.display.update() ''' Instancia do jogo ''' if __name__ == '__main__': instancia =", "perder, posição e velocidade 
da cobra, pontos, bem como são criados os objetos", "preto # else: # self.fundo = branco ''' Limpa a tela a cada", "28) textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão", "= Jogo() instancia.menu() # Iniciando o jogo através da instância ''' Fecha a", "- placar, 20) ''' Classe Jogo, definirá todo o restante do jogo, como", "- tamanho - placar, 20) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos =", "!= \"esquerda\": self.cobra.direcao = \"direita\" if event.key == pygame.K_UP and self.cobra.direcao != \"baixo\":", "com todos os elementos que foram desenhados anteriormente ''' pygame.display.update() ''' Define o", "o que isso faz''' # if pontos_fundo == 10: # pontos_fundo = 0", "indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo,", "branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela com todos os elementos '''", "if event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\" if event.key", "and 268 < mouse_y < 268 + 51: self.jogando = True self.noMenu =", "para a nova posição que é definida como parâmetro do método ''' self.cobra.move(self.pos_x,", "if self.pos_x < 0: self.jogando = False self.perdeu = True self.perdido() if self.pos_y", "se o jogador quis sair do jogo ou quer voltar a jogar, caso", "titulo \"Snake Game\" na tela ''' # textoPerdeuSombra = Texto(\"Snake Game\", cinza, 100)", "preto, [195, 270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\", branco, 70) textoContinuar.mostrar(210, 273)", "modo livre, o jogo não possuirá bordas e você poderá atravessar o mapa,", "(0, 0, 0) vermelho = (255, 0, 0) verde = (0, 200, 0)", "a cobra aumenta e o placar de pontos aumenta ''' if self.pos_x ==", "= False self.perdeu = True self.perdido() if self.pos_x < 0: self.jogando = False", "y): self.x = x self.y = y self.cabeca = [x, y] self.comp =", "pygame.display.set_mode((largura, 
altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para criar objetos de textop", "y): self.cabeca = [x, y] self.cobra.append([x, y]) ''' Método cresce, aumenta o comprimento", "jogando ''' pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 270, 275,", "== self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo +=", "< mouse_x < 143 + 359 and 168 < mouse_y < 168 +", "''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False break", "0) verde = (0, 200, 0) verde_escuro = (0, 150, 0) azul =", "elif self.cobra.direcao == \"direita\": self.pos_x += tamanho else: pass ''' Checa se a", "# textoPerdeuSombra.mostrar(108, 28) textoPerdeu = Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha", "continuar jogando ''' pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170,", "cresce(self): self.comp += 1 ''' Método mostrar, desenha cada pedaço da cobra na", "estejam, a maçã é reposicionada, a cobra aumenta e o placar de pontos", "event in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu = False if event.type ==", "= x self.y = y self.cabeca = [x, y] self.comp = 1 self.cobra", "self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado pelo blocos adicionais ''' self.cobra.rastro() '''", "x, y): self.x = x self.y = y self.cabeca = [x, y] self.comp", "reinicia(self, x, y): self.x = x self.y = y self.cabeca = [x, y]", "da classe ''' def mostrar(self, x, y): fundo.blit(self.texto, [x, y]) ''' Classe cobra", "mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho]) ''' Método reposicionar, define novos x", "checagem do modo, caso o modo escolhido no menu tenha sido o modo", "elementos ''' pygame.display.update() ''' Instancia do jogo ''' if __name__ == '__main__': instancia", "< 143 + 369 and 168 < mouse_y < 168 + 51: 
self.jogando", "= \"\" ''' Método move, recebe os parâmetro x e y, que serão", "de configurações do jogo, relógio para definir o fps, fundo para desenhar tudo", "na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\",", "objeto cobra, como cabeça, comprimento e direção, bem como o array que contém", "tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice += 1 ''' Método", "0, largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura]) pygame.draw.rect(fundo, branco,", "cobra comeu ela mesma, caso tenha comido o jogo é definido perdido, e", "o fps do jogo ''' relogio.tick(15) ''' Método perdido, possui o loop da", "!= \"direita\": self.cobra.direcao = \"esquerda\" if event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\":", "do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado pelo blocos adicionais", "= largura - tamanho if self.pos_y + tamanho > altura - placar: self.pos_y", "tamanho, 20) self.pos_y = randrange(0, altura - tamanho - placar, 20) self.cobra.direcao =", "a cobra ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando =", "tenha comido o jogo é definido perdido, e o método \"perdido\" é chamado", "verdadeiro, caso contrário, retorna falso ''' def morreu(self): if any(Bloco == self.cabeca for", "''' if self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y", "verde_escuro = (0, 150, 0) azul = (0, 0, 255) prata = (192,", "try: pygame.init() print(\"O modulo pygame foi inicializado com sucesso\") except: print(\"O modulo pygame", "altura - placar: self.jogando = False self.perdeu = True self.perdido() if self.pos_y <", "10: # pontos_fundo = 0 # if self.fundo == branco: # self.fundo =", "self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho", "importados com 
sucesso\") ''' Utilizando um bloco de tentativa e erro para checar", "quer voltar a jogar, caso queira voltar, todo o jogo é redefinido e", "- tamanho - placar else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco,", "ou sair do jogo ''' def perdido(self): while self.perdeu: ''' Iterador de eventos,", "branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco,", "Desenha \"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu", "= False self.modo = \"livre\" self.iniciar() ''' Limpa a tela ''' fundo.fill(branco) '''", "cobra e maçã ''' import pygame import pygame.locals from random import randrange print(\"Módulos", "instancia.menu() # Iniciando o jogo através da instância ''' Fecha a janela principal", "if self.pos_x == self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1", "'__main__': instancia = Jogo() instancia.menu() # Iniciando o jogo através da instância '''", "''' Utilizando um bloco de tentativa e erro para checar se o pygame", "jogo é definido perdido, e o método \"perdido\" é chamado ''' if self.cobra.morreu():", "cobra para os valores iniciais, para caso depois de ter perdido o jogados", "inicializado com sucesso\") except: print(\"O modulo pygame não foi inicializado com sucesso\") '''", "pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False self.perdeu = False if event.type", "Desenha o placar e o texto contendo a pontuação atual ''' pygame.draw.rect(fundo, branco,", "para mover a cobra ''' for event in pygame.event.get(): if event.type == pygame.QUIT:", "chamado ''' if self.cobra.morreu(): self.jogando = False self.perdeu = True self.perdido() ''' Desenha", "jogo, recebe a mensagem a cor e o tamanho como parâmetros ''' class", "prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 270, 275, 54]) textoContinuar =", "jogar, 
caso queira voltar, todo o jogo é redefinido e se retorna para", "[0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura", "mesma, caso tenha comido o jogo é definido perdido, e o método \"perdido\"", "< 268 + 51: self.jogando = True self.noMenu = False self.perdeu = False", "o modo livre, o jogo não possuirá bordas e você poderá atravessar o", "== pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando = False self.perdeu = False if", "randrange print(\"Módulos importados com sucesso\") ''' Utilizando um bloco de tentativa e erro", "self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 ''' Limpa", "largura = 640 altura = 480 tamanho = 20 placar = 40 branco", "pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos", "ter perdido o jogados possa continuar jogando ''' def reinicia(self, x, y): self.x", "de textop que serão exibidos nas telas do jogo, recebe a mensagem a", "-= tamanho elif self.cobra.direcao == \"direita\": self.pos_x += tamanho else: pass ''' Checa", "''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu = False if", "cobra ''' def rastro(self): if len(self.cobra) > self.comp: del self.cobra[0] ''' Método morreu,", "== pygame.QUIT: self.jogando = False break if event.type == pygame.KEYDOWN: if event.key ==", "mostrar desenha na tela o texto criado pelo construtor da classe ''' def", "pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150,", "corretamente ''' try: pygame.init() print(\"O modulo pygame foi inicializado com sucesso\") except: print(\"O", "False self.perdeu = False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando", "= False break if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao", "\"perdido\" ''' 
if self.modo == \"livre\": if self.pos_x + tamanho > largura: self.pos_x", "200, 0) verde_escuro = (0, 150, 0) azul = (0, 0, 255) prata", "= mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x < 143 + 369", "altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para criar objetos de textop que", "268, 279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) textoContinuar = Texto(\"Modo Livre\",", "o jogador voltar a jogar ou sair do jogo ''' def perdido(self): while", "[self.cabeca] ''' Classe maçã que definirá o objeto maçã, não recebe nenhum parâmetro,", "jogador quis sair do jogo ou quer voltar a jogar, caso queira voltar,", "''' Método iniciar, possui o loop principal do jogo, que faz absolutamente tudo", "comprimento e direção, bem como o array que contém a posição de cada", "branco, [0, altura - placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza,", "como variaveis de controle para continuar jogando, perder, posição e velocidade da cobra,", "0: self.jogando = False self.perdeu = True self.perdido() if self.pos_y + tamanho >", "= y self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca] '''", "< 0: self.jogando = False self.perdeu = True self.perdido() ''' Move a cobra", "= 0 if self.pos_y < 0: self.pos_y = altura - tamanho - placar", "self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 if 193", "placar - 2, largura, 2]) if self.pos_x + tamanho > largura: self.jogando =", "self.velocidade_y = 0 self.pontos = 0 if 193 < mouse_x < 193 +", "self.perdeu = False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0]", "''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80)", "de execução estão podem ser obtidos pelo \"pygame.event.get()\", é verificado se o jogador", "== pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao = 
\"esquerda\" if event.key == pygame.K_RIGHT", "configurações do jogo, relógio para definir o fps, fundo para desenhar tudo do", "pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor) ''' Método mostrar desenha na tela", "a cobra ultrapassou alguma das bordas, caso tenha ultrapassado é definido que não", "serão as novas coordenadas da cabeça e insere a nova cabeça no array", "print(\"O modulo pygame não foi inicializado com sucesso\") ''' Declaração das váriaveis globais", "XY[1], tamanho, tamanho]) indice += 1 ''' Método rastro, remove a cauda quando", "acontecem durante o tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", sendo", "+ 359 and 168 < mouse_y < 168 + 51: self.jogando = True", "a pontuação final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos),", "0 self.velocidade_y = 0 self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca =", "tamanho - placar else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0,", "47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão", "absolutamente tudo que acontece no jogo ''' def iniciar(self): pontos_fundo = 0 while", "faz tudo que acontece ao perder, podendo o jogador voltar a jogar ou", "jogar ou sair do jogo ''' def perdido(self): while self.perdeu: ''' Iterador de", "altura - tamanho - placar, 20) ''' Método mostrar, desenha a maçã na", "2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2]) pygame.draw.rect(fundo, branco, [largura - 2,", "loop principal do jogo, que faz absolutamente tudo que acontece no jogo '''", "jogo ''' class Cobra: def __init__(self, x, y): self.x = x self.y =", "pela cobra ''' def reposicionar(self): self.x = randrange(0, largura - tamanho, 20) self.y", "fundo.blit(self.texto, [x, y]) ''' Classe cobra definirá os elementos do objeto cobra, como", "morreu(self): if any(Bloco == self.cabeca for Bloco in 
self.cobra[:-1]): return True return False", "+ 369 and 168 < mouse_y < 168 + 51: self.jogando = False", "direção a cobra está seguindo e redefine a nova posição naquela direção '''", "(0, 150, 0) azul = (0, 0, 255) prata = (192, 192, 192)", "redefine a nova posição naquela direção ''' if self.cobra.direcao == \"cima\": self.pos_y -=", "Jogo, definirá todo o restante do jogo, como variaveis de controle para continuar", "Método move, recebe os parâmetro x e y, que serão as novas coordenadas", "''' Checa se a cobra comeu ela mesma, caso tenha comido o jogo", "mouse_x < 143 + 359 and 168 < mouse_y < 168 + 51:", "= preto self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura", "self.pos_y) ''' Limpa o rastro deixado pelo blocos adicionais ''' self.cobra.rastro() ''' Checa", "você poderá atravessar o mapa, mas caso tenha escolhido o modo clássico é", "''' Classe maçã que definirá o objeto maçã, não recebe nenhum parâmetro, possui", "obtidos pelo \"pygame.event.get()\", sendo assim verificado se o jogo não foi fechado, bem", "= 0 self.velocidade_y = 0 self.pontos = 0 ''' Limpa a tela '''", "= 0 self.pontos = 0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = Maca() '''", "0: self.jogando = False self.perdeu = True self.perdido() ''' Move a cobra para", "''' Limpa a tela a cada novo inicio de loop ''' fundo.fill(self.fundo) '''", "self.pos_y < 0: self.pos_y = altura - tamanho - placar else: pygame.draw.rect(fundo, branco,", "= altura - tamanho - placar else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura])", "recebe os parâmetro x e y, que serão as novas coordenadas da cabeça", "if any(Bloco == self.cabeca for Bloco in self.cobra[:-1]): return True return False '''", "jogo não possuirá bordas e você poderá atravessar o mapa, mas caso tenha", "mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x < 143 + 359 and", "com todos os elementos ''' pygame.display.update() def menu(self): while self.noMenu: ''' Iterador de", "= pygame.time.Clock() 
fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para", "cobra aumenta e o placar de pontos aumenta ''' if self.pos_x == self.maca.x", "pygame.draw.rect(fundo, branco, [0, altura - placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos),", "150, 0) azul = (0, 0, 255) prata = (192, 192, 192) laranja", "self.pos_x < 0: self.pos_x = largura - tamanho if self.pos_y + tamanho >", "= Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação", "self.noMenu = False self.modo = \"classico\" self.iniciar() if 183 < mouse_x < 183", "[XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice +=", "= y self.cabeca = [x, y] self.comp = 1 self.cobra = [self.cabeca] self.direcao", "modo clássico é checado se a cobra ultrapassou alguma das bordas, caso tenha", "caso o modo escolhido no menu tenha sido o modo livre, o jogo", "indice = 0 for XY in self.cobra: if indice == len(self.cobra) - 1:", "mouse_y < 168 + 51: self.jogando = False self.perdeu = False self.noMenu =", "len(self.cobra) > self.comp: del self.cobra[0] ''' Método morreu, verifica se a cobra comeu", "< mouse_x < 193 + 279 and 268 < mouse_y < 268 +", "# Iniciando o jogo através da instância ''' Fecha a janela principal do", "mouse_y < 168 + 51: self.jogando = True self.perdeu = False self.noMenu =", "> largura: self.jogando = False self.perdeu = True self.perdido() if self.pos_x < 0:", "''' import pygame import pygame.locals from random import randrange print(\"Módulos importados com sucesso\")", "que foram desenhados anteriormente ''' pygame.display.update() ''' Define o fps do jogo '''", "self.pontos = 0 if 193 < mouse_x < 193 + 279 and 268", "self.cobra = [self.cabeca] ''' Classe maçã que definirá o objeto maçã, não recebe", "o restante do jogo, como variaveis de controle para continuar 
jogando, perder, posição", "self.pos_y < 0: self.jogando = False self.perdeu = True self.perdido() ''' Move a", "Texto: def __init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg,", "e y, que serão as novas coordenadas da cabeça e insere a nova", "iniciais, para caso depois de ter perdido o jogados possa continuar jogando '''", "tamanho, tamanho]) ''' Método reposicionar, define novos x e y aleatórios para a", "modulo pygame não foi inicializado com sucesso\") ''' Declaração das váriaveis globais que", "Aqui primeiro é feita a checagem do modo, caso o modo escolhido no", "importada, juntamente do modulo locals dela, além disso o metodo randrange que usaremos", "move, recebe os parâmetro x e y, que serão as novas coordenadas da", "- tamanho, 20) self.pos_y = randrange(0, altura - tamanho - placar, 20) self.cobra.direcao", "in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False self.perdeu = False if", "self.jogando = False self.perdeu = True self.perdido() ''' Desenha a cobra na tela", "e descubra o que isso faz''' # if pontos_fundo == 10: # pontos_fundo", "self.jogando = True self.perdeu = False self.pos_x = randrange(0, largura - tamanho, 20)", "mouse_x < 183 + 279 and 268 < mouse_y < 268 + 51:", "é definida como parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro", "''' def perdido(self): while self.perdeu: ''' Iterador de eventos, todos os eventos que", "cada pedaço da cobra na tela ''' def mostrar(self): indice = 0 for", "\"baixo\": self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao", "= \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos =", "''' Checa se a cobra e a maçã estão na mesma posição, caso", "mapa, mas caso tenha escolhido o modo clássico é checado se a cobra", "y): fundo.blit(self.texto, [x, y]) ''' Classe cobra 
definirá os elementos do objeto cobra,", "cobra e a maçã estão na mesma posição, caso estejam, a maçã é", "if self.pos_x < 0: self.pos_x = largura - tamanho if self.pos_y + tamanho", "textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela com", "perdido(self): while self.perdeu: ''' Iterador de eventos, todos os eventos que acontecem durante", "do modulo locals dela, além disso o metodo randrange que usaremos para gerar", "0 if self.pos_x < 0: self.pos_x = largura - tamanho if self.pos_y +", "= (255, 69, 0) cinza = (79, 79, 79) cinzaClaro = (220, 220,", "váriaveis globais que utilizaremos em todo o código, altura e largura da tela,", "ao menu de seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto,", "se está mais jogando porque perdeu e é chamado o método \"perdido\" '''", "das posições ''' def move(self, x, y): self.cabeca = [x, y] self.cobra.append([x, y])", "-= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\":", "69, 0) cinza = (79, 79, 79) cinzaClaro = (220, 220, 220) '''", "preto = (0, 0, 0) vermelho = (255, 0, 0) verde = (0,", "= pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe texto servirá para criar objetos de", "''' if self.cobra.morreu(): self.jogando = False self.perdeu = True self.perdido() ''' Desenha a", "173) ''' Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268,", "pygame.draw.rect(fundo, prata, [193, 268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 270, 275, 54]) textoContinuar", "[self.x, self.y, tamanho, tamanho]) ''' Método reposicionar, define novos x e y aleatórios", "maçã que definirá o objeto maçã, não recebe nenhum parâmetro, possui os atributos", "tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", sendo assim verificado se", "= 0 while self.jogando: ''' Iterador de eventos, todos os eventos 
que acontecem", "para definir o fps, fundo para desenhar tudo do jogo e o título", "cobra está seguindo e redefine a nova posição naquela direção ''' if self.cobra.direcao", "voltar, todo o jogo é redefinido e se retorna para o método iniciar", "o comprimento da cobra ''' def cresce(self): self.comp += 1 ''' Método mostrar,", "é definido que não se está mais jogando porque perdeu e é chamado", "redefine todos os valores da cobra para os valores iniciais, para caso depois", "= self.font.render(msg, True, cor) ''' Método mostrar desenha na tela o texto criado", "maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho]) '''", "do placar e cores no formato RGB''' largura = 640 altura = 480", "(220, 220, 220) ''' Definição de configurações do jogo, relógio para definir o", "verificado se o jogo não foi fechado, bem como se nenhuma das setas", "Clássico\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando '''", "== pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu = False if event.type == pygame.MOUSEBUTTONDOWN:", "definirá todo o restante do jogo, como variaveis de controle para continuar jogando,", "possuirá bordas e você poderá atravessar o mapa, mas caso tenha escolhido o", "o jogo através da instância ''' Fecha a janela principal do jogo '''", "= \"cima\" if event.key == pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\"", "pelo blocos adicionais ''' self.cobra.rastro() ''' Checa se a cobra comeu ela mesma,", "- 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30)", "''' if __name__ == '__main__': instancia = Jogo() instancia.menu() # Iniciando o jogo", "o jogo ''' class Cobra: def __init__(self, x, y): self.x = x self.y", "Final: \" + str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão de", "279, 51]) pygame.draw.rect(fundo, preto, [185, 270, 275, 47]) 
textoContinuar = Texto(\"Modo Livre\", branco,", "jogo ''' def perdido(self): while self.perdeu: ''' Iterador de eventos, todos os eventos", "e o texto contendo a pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura -", "sucesso\") ''' Utilizando um bloco de tentativa e erro para checar se o", "== pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\": self.cobra.direcao = \"esquerda\"", "do objeto cobra, como cabeça, comprimento e direção, bem como o array que", "for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False self.perdeu =", "if self.pos_y < 0: self.jogando = False self.perdeu = True self.perdido() ''' Move", "self.perdeu = True self.perdido() ''' Desenha a cobra na tela ''' self.cobra.mostrar() '''", "self.velocidade_x = 0 self.velocidade_y = 0 self.pontos = 0 if 193 < mouse_x", "A biblioteca pygame é importada, juntamente do modulo locals dela, além disso o", "+ tamanho > altura - placar: self.pos_y = 0 if self.pos_y < 0:", "definir o fps, fundo para desenhar tudo do jogo e o título da", "168 < mouse_y < 168 + 51: self.jogando = True self.perdeu = False", "toda a tela com todos os elementos que foram desenhados anteriormente ''' pygame.display.update()", "utilizaremos em todo o código, altura e largura da tela, tamanho da cobra", "for event in pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False break if", "relogio.tick(15) ''' Método perdido, possui o loop da tela de derrota, faz tudo", "maçã, tamanho do placar e cores no formato RGB''' largura = 640 altura", "self.jogando: ''' Descomente e descubra o que isso faz''' # if pontos_fundo ==", "def __init__(self): self.jogando = False self.perdeu = False self.noMenu = True self.modo =", "self.cobra = [self.cabeca] self.direcao = \"\" ''' Método move, recebe os parâmetro x", "False self.perdeu = True self.perdido() if self.pos_y < 0: self.jogando = False self.perdeu", "maçã, não recebe nenhum parâmetro, possui os atributos 
x e y que é", "193 + 279 and 268 < mouse_y < 268 + 58: self.jogando =", "= False self.perdeu = True self.perdido() if self.pos_y < 0: self.jogando = False", "da cobra, recebe as coordenadas x e y como parâmetro, que será o", "0 # if self.fundo == branco: # self.fundo = preto # else: #", "pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar = Texto(\"Voltar ao Menu\", branco, 70)", "o jogo é redefinido e se retorna para o método iniciar ''' for", "texto servirá para criar objetos de textop que serão exibidos nas telas do", "assim verificado se o jogo não foi fechado, bem como se nenhuma das", "''' Desenha a cobra na tela ''' self.cobra.mostrar() ''' Desenha o placar e", "iniciar(self): pontos_fundo = 0 while self.jogando: ''' Iterador de eventos, todos os eventos", "- 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1],", "365, 47]) textoContinuar = Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha", "bem como se nenhuma das setas foi apertada para mover a cobra '''", "self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0, altura - tamanho", "tenha escolhido o modo clássico é checado se a cobra ultrapassou alguma das", "else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice += 1 ''' Método rastro,", "Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\" +", "- placar: self.pos_y = 0 if self.pos_y < 0: self.pos_y = altura -", "da cobra na tela ''' def mostrar(self): indice = 0 for XY in", "True self.perdido() if self.pos_y < 0: self.jogando = False self.perdeu = True self.perdido()", "aumenta o comprimento da cobra ''' def cresce(self): self.comp += 1 ''' Método", "Limpa a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra =", "estão podem ser obtidos pelo \"pygame.event.get()\", sendo assim verificado se o 
jogo não", "altura - tamanho - placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x", "textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao =", "359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco,", "preto, [145, 170, 355, 47]) textoContinuar = Texto(\"Modo Clássico\", branco, 70) textoContinuar.mostrar(150, 173)", "self.modo == \"livre\": if self.pos_x + tamanho > largura: self.pos_x = 0 if", "''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80)", "[x, y]) ''' Classe cobra definirá os elementos do objeto cobra, como cabeça,", "= mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x < 143 + 359", "reposicionar, define novos x e y aleatórios para a maçã após ser comida", "se sim retorna verdadeiro, caso contrário, retorna falso ''' def morreu(self): if any(Bloco", "class Maca: def __init__(self): self.x = randrange(0, largura - tamanho, 20) self.y =", "nova cabeça no array das posições ''' def move(self, x, y): self.cabeca =", "formato RGB''' largura = 640 altura = 480 tamanho = 20 placar =", "''' def mostrar(self, x, y): fundo.blit(self.texto, [x, y]) ''' Classe cobra definirá os", "novas coordenadas da cabeça e insere a nova cabeça no array das posições", "268, 279, 58]) pygame.draw.rect(fundo, preto, [195, 270, 275, 54]) textoContinuar = Texto(\"Novo Jogo\",", "todo o jogo é redefinido e se retorna para o método iniciar '''", "<gh_stars>0 ''' A biblioteca pygame é importada, juntamente do modulo locals dela, além", "0) cinza = (79, 79, 79) cinzaClaro = (220, 220, 220) ''' Definição", "True self.modo = None self.fundo = preto self.pos_x = randrange(0, largura - tamanho,", "os eventos que acontecem durante o tempo de execução estão podem ser obtidos", "mostrar(self): indice = 
0 for XY in self.cobra: if indice == len(self.cobra) -", "pygame.locals from random import randrange print(\"Módulos importados com sucesso\") ''' Utilizando um bloco", "str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30) ''' Desenha a maçã na tela", "voltar ao menu de seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo,", "cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor) ''' Método", "cobra ''' def reposicionar(self): self.x = randrange(0, largura - tamanho, 20) self.y =", "Desenha o botão de voltar ao menu de seleção ''' pygame.draw.rect(fundo, prata, [143,", "- tamanho - placar, 20) self.cobra.direcao = \"\" self.maca.reposicionar() self.cobra.reinicia(self.pos_x, self.pos_y) self.velocidade_x =", "altura - placar, largura, placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9,", "iniciar ''' for event in pygame.event.get(): if event.type == pygame.QUIT: self.noMenu = False", "do jogo, recebe a mensagem a cor e o tamanho como parâmetros '''", "como parâmetro, que será o local na tela onde ela começará o jogo", "= 0 self.pontos = 0 ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha", "break if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and self.cobra.direcao != \"direita\":", "posição de cada pedaço da cobra, recebe as coordenadas x e y como", "''' if self.pos_x == self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos +=", "cinza = (79, 79, 79) cinzaClaro = (220, 220, 220) ''' Definição de", "Iterador de eventos, todos os eventos que acontecem durante o tempo de execução", "parâmetro do método ''' self.cobra.move(self.pos_x, self.pos_y) ''' Limpa o rastro deixado pelo blocos", "and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo += 1 '''", "o tempo de execução estão podem ser obtidos pelo 
\"pygame.event.get()\", é verificado se", "caso tenha comido o jogo é definido perdido, e o método \"perdido\" é", "- 30) ''' Desenha a maçã na tela ''' self.maca.mostrar() ''' Atualiza toda", "x e y que é a posição da maçã na tela ''' class", "def iniciar(self): pontos_fundo = 0 while self.jogando: ''' Iterador de eventos, todos os", "pontuação atual ''' pygame.draw.rect(fundo, branco, [0, altura - placar, largura, placar]) textoPlacarSombra =", "vermelho, [self.x, self.y, tamanho, tamanho]) ''' Método reposicionar, define novos x e y", "self.maca.x and self.pos_y == self.maca.y: self.maca.reposicionar() self.cobra.cresce() self.pontos += 1 pontos_fundo += 1", "+= 1 pontos_fundo += 1 ''' Aqui primeiro é feita a checagem do", "obtidos pelo \"pygame.event.get()\", é verificado se o jogador quis sair do jogo ou", "a cobra comeu ela mesma, se sim retorna verdadeiro, caso contrário, retorna falso", "que serão as novas coordenadas da cabeça e insere a nova cabeça no", "20) self.pos_y = randrange(0, altura - tamanho - placar, 20) self.velocidade_x = 0", "= True self.perdido() if self.pos_y < 0: self.jogando = False self.perdeu = True", "< mouse_y < 168 + 51: self.jogando = False self.perdeu = False self.noMenu", "final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50)", "= 0 for XY in self.cobra: if indice == len(self.cobra) - 1: pygame.draw.rect(fundo,", "''' Desenha \"<NAME>\" na tela ''' textoPerdeuSombra = Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29)", "o tempo de execução estão podem ser obtidos pelo \"pygame.event.get()\", sendo assim verificado", "cobra, recebe as coordenadas x e y como parâmetro, que será o local", "o modo clássico é checado se a cobra ultrapassou alguma das bordas, caso", "= True self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura", "if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu = False if event.type", "= False if 
event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.jogando = False", "if indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho]) else:", "menu(self): while self.noMenu: ''' Iterador de eventos, todos os eventos que acontecem durante", "pygame.draw.rect(fundo, prata, [143, 168, 359, 51]) pygame.draw.rect(fundo, preto, [145, 170, 355, 47]) textoContinuar", "\"livre\" self.iniciar() ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha o titulo \"Snake", "= (255, 0, 0) verde = (0, 200, 0) verde_escuro = (0, 150,", "blocos adicionais ''' self.cobra.rastro() ''' Checa se a cobra comeu ela mesma, caso", "if self.pos_x + tamanho > largura: self.pos_x = 0 if self.pos_x < 0:", "''' def reposicionar(self): self.x = randrange(0, largura - tamanho, 20) self.y = randrange(0,", "31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura - 30) '''", "= 0 ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na tela", "o rastro deixado pelo blocos adicionais ''' self.cobra.rastro() ''' Checa se a cobra", "placar]) textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar", "o mapa, mas caso tenha escolhido o modo clássico é checado se a", "do jogo e o título da janela do jogo ''' relogio = pygame.time.Clock()", "randrange(0, largura - tamanho, 20) self.y = randrange(0, altura - tamanho - placar,", "do jogo, como variaveis de controle para continuar jogando, perder, posição e velocidade", "x e y, que serão as novas coordenadas da cabeça e insere a", "False if event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y =", "as novas coordenadas da cabeça e insere a nova cabeça no array das", "se o pygame foi iniciado corretamente ''' try: pygame.init() print(\"O modulo pygame foi", "se nenhuma das setas foi apertada para mover a cobra ''' for event", "desenha 
cada pedaço da cobra na tela ''' def mostrar(self): indice = 0", "o método \"perdido\" ''' if self.modo == \"livre\": if self.pos_x + tamanho >", "pygame não foi inicializado com sucesso\") ''' Declaração das váriaveis globais que utilizaremos", "== pygame.K_DOWN and self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE:", "29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final", "e maçã ''' import pygame import pygame.locals from random import randrange print(\"Módulos importados", "escolhido o modo clássico é checado se a cobra ultrapassou alguma das bordas,", "50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão de voltar ao menu de seleção", "mouse_y < 268 + 51: self.jogando = True self.noMenu = False self.perdeu =", "import pygame.locals from random import randrange print(\"Módulos importados com sucesso\") ''' Utilizando um", "laranja = (255, 69, 0) cinza = (79, 79, 79) cinzaClaro = (220,", "0) verde_escuro = (0, 150, 0) azul = (0, 0, 255) prata =", "''' def reinicia(self, x, y): self.x = x self.y = y self.cabeca =", "< mouse_y < 268 + 58: self.jogando = True self.perdeu = False self.pos_x", "else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0, largura, 2])", "valores da cobra para os valores iniciais, para caso depois de ter perdido", "self.maca = Maca() ''' Método iniciar, possui o loop principal do jogo, que", "51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar = Texto(\"Voltar ao Menu\", branco,", "359 and 168 < mouse_y < 168 + 51: self.jogando = True self.perdeu", "30) ''' Desenha a maçã na tela ''' self.maca.mostrar() ''' Atualiza toda a", "para as posições da cobra e maçã ''' import pygame import pygame.locals from", "coordenadas x e y como parâmetro, que será o local na tela onde", "if 143 < mouse_x < 143 + 359 and 168 < mouse_y <", "279 and 268 < mouse_y < 268 + 51: self.jogando = True 
self.noMenu", "ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de continuar jogando", "pygame import pygame.locals from random import randrange print(\"Módulos importados com sucesso\") ''' Utilizando", "''' Limpa a tela ''' fundo.fill(branco) ''' Desenha o titulo \"Snake Game\" na", "foi fechado, bem como se nenhuma das setas foi apertada para mover a", "tamanho > largura: self.pos_x = 0 if self.pos_x < 0: self.pos_x = largura", "20) self.pos_y = randrange(0, altura - tamanho - placar, 20) self.cobra.direcao = \"\"", "= 0 if 193 < mouse_x < 193 + 279 and 268 <", "< mouse_x < 183 + 279 and 268 < mouse_y < 268 +", "self.pos_x + tamanho > largura: self.pos_x = 0 if self.pos_x < 0: self.pos_x", "self.pontos = 0 ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\" na", "268 < mouse_y < 268 + 51: self.jogando = True self.noMenu = False", "o placar e o texto contendo a pontuação atual ''' pygame.draw.rect(fundo, branco, [0,", "= Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão de continuar", "largura: self.pos_x = 0 if self.pos_x < 0: self.pos_x = largura - tamanho", "25) textoPlacar.mostrar(10, altura - 30) ''' Desenha a maçã na tela ''' self.maca.mostrar()", "+ tamanho > largura: self.jogando = False self.perdeu = True self.perdido() if self.pos_x", "self.comp = 1 self.cobra = [self.cabeca] ''' Classe maçã que definirá o objeto", "''' Desenha o placar e o texto contendo a pontuação atual ''' pygame.draw.rect(fundo,", "possui o loop da tela de derrota, faz tudo que acontece ao perder,", "de eventos, todos os eventos que acontecem durante o tempo de execução estão", "- placar else: pygame.draw.rect(fundo, branco, [0, 0, 2, altura]) pygame.draw.rect(fundo, branco, [0, 0,", "move(self, x, y): self.cabeca = [x, y] self.cobra.append([x, y]) ''' Método cresce, aumenta", "25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25) 
textoPlacar.mostrar(10,", "o objeto maçã, não recebe nenhum parâmetro, possui os atributos x e y", "40 branco = (255, 255, 255) preto = (0, 0, 0) vermelho =", "tamanho, tamanho]) else: pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho]) indice += 1 '''", "Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [143, 168, 359, 51])", "alguma das bordas, caso tenha ultrapassado é definido que não se está mais", "jogo e o título da janela do jogo ''' relogio = pygame.time.Clock() fundo", "= False self.perdeu = False self.noMenu = True self.modo = None self.fundo =", "onde ela começará o jogo ''' class Cobra: def __init__(self, x, y): self.x", "é reposicionada, a cobra aumenta e o placar de pontos aumenta ''' if", "Texto(\"Pontuação Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final:", "foi inicializado com sucesso\") ''' Declaração das váriaveis globais que utilizaremos em todo", "Texto(\"<NAME>\", cinza, 80) textoPerdeuSombra.mostrar(159, 29) textoPerdeu = Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) '''", "= Texto(\"<NAME>\", vermelho, 80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final do jogador", "criar objetos de textop que serão exibidos nas telas do jogo, recebe a", "0 self.cobra = Cobra(self.pos_x, self.pos_y) self.maca = Maca() ''' Método iniciar, possui o", "disso o metodo randrange que usaremos para gerar numeros aleatórios para as posições", "se a cobra comeu ela mesma, se sim retorna verdadeiro, caso contrário, retorna", "no array das posições ''' def move(self, x, y): self.cabeca = [x, y]", "elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao == \"direita\": self.pos_x +=", "(255, 255, 255) preto = (0, 0, 0) vermelho = (255, 0, 0)", "self.cobra: if indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho])", "''' self.cobra.rastro() ''' Checa se a cobra 
comeu ela mesma, caso tenha comido", "+ 279 and 268 < mouse_y < 268 + 51: self.jogando = True", "== branco: # self.fundo = preto # else: # self.fundo = branco '''", "= (255, 255, 255) preto = (0, 0, 0) vermelho = (255, 0,", "event.key == pygame.K_RIGHT and self.cobra.direcao != \"esquerda\": self.cobra.direcao = \"direita\" if event.key ==", "False self.perdeu = True self.perdido() ''' Move a cobra para a nova posição", "class Texto: def __init__(self, msg, cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto =", "event.type == pygame.QUIT: self.jogando = False break if event.type == pygame.KEYDOWN: if event.key", "as posições da cobra e maçã ''' import pygame import pygame.locals from random", "and self.cobra.direcao != \"cima\": self.cobra.direcao = \"baixo\" if event.key == pygame.K_SPACE: self.pontos +=", "event.type == pygame.MOUSEBUTTONDOWN: mouse_pos = pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if", "= False if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.noMenu = False", "47]) textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a tela", "nova posição naquela direção ''' if self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif", "80) textoPerdeu.mostrar(160, 30) ''' Desenha a pontuação final do jogador ''' textoPontuacaoSombra =", "= 1 self.cobra = [self.cabeca] ''' Classe maçã que definirá o objeto maçã,", "''' Método mostrar desenha na tela o texto criado pelo construtor da classe", "o texto criado pelo construtor da classe ''' def mostrar(self, x, y): fundo.blit(self.texto,", "0, 2, altura]) pygame.draw.rect(fundo, branco, [0, altura - placar - 2, largura, 2])", "Desenha o titulo \"Snake Game\" na tela ''' # textoPerdeuSombra = Texto(\"Snake Game\",", "comido o jogo é definido perdido, e o método \"perdido\" é chamado '''", "Final: \" + str(self.pontos), cinza, 50) textoPontuacaoSombra.mostrar(179, 99) textoPontuacao = Texto(\"Pontuação Final: 
\"", "e y como parâmetro, que será o local na tela onde ela começará", "pygame.event.get(): if event.type == pygame.QUIT: self.jogando = False break if event.type == pygame.KEYDOWN:", "str(self.pontos), prata, 50) textoPontuacao.mostrar(180, 100) ''' Desenha o botão de voltar ao menu", "gerar numeros aleatórios para as posições da cobra e maçã ''' import pygame", "Texto(\"Snake Game\", preto, 100) textoPerdeu.mostrar(110, 30) ''' Desenha o botão de continuar jogando", "altura = 480 tamanho = 20 placar = 40 branco = (255, 255,", "textoContinuar = Texto(\"Voltar ao Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão", "numeros aleatórios para as posições da cobra e maçã ''' import pygame import", "altura - 31) textoPlacar = Texto(\"Pontuação:\" + str(self.pontos), branco, 25) textoPlacar.mostrar(10, altura -", "0) azul = (0, 0, 255) prata = (192, 192, 192) laranja =", "''' def mostrar(self): indice = 0 for XY in self.cobra: if indice ==", "self.pos_y -= tamanho elif self.cobra.direcao == \"baixo\": self.pos_y += tamanho elif self.cobra.direcao ==", "o loop da tela de derrota, faz tudo que acontece ao perder, podendo", "= False self.perdeu = False self.noMenu = True self.pos_x = randrange(0, largura -", "biblioteca pygame é importada, juntamente do modulo locals dela, além disso o metodo", "elementos do objeto cobra, como cabeça, comprimento e direção, bem como o array", "self.noMenu = True self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0,", "cor) ''' Método mostrar desenha na tela o texto criado pelo construtor da", "de ter perdido o jogados possa continuar jogando ''' def reinicia(self, x, y):", "Desenha o botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51])", "20) ''' Método mostrar, desenha a maçã na tela ''' def mostrar(self): pygame.draw.rect(fundo,", "na tela ''' self.cobra.mostrar() ''' Desenha o placar e o texto contendo a", "remove a cauda quando o tamanho do array é maior que o 
comprimento", "30) ''' Desenha a pontuação final do jogador ''' textoPontuacaoSombra = Texto(\"Pontuação Final:", "posição naquela direção ''' if self.cobra.direcao == \"cima\": self.pos_y -= tamanho elif self.cobra.direcao", "Jogo\", branco, 70) textoContinuar.mostrar(210, 273) ''' Atualiza a tela com todos os elementos", "pygame.mouse.get_pos() mouse_x = mouse_pos[0] mouse_y = mouse_pos[1] if 143 < mouse_x < 143", "''' Descomente e descubra o que isso faz''' # if pontos_fundo == 10:", "RGB''' largura = 640 altura = 480 tamanho = 20 placar = 40", "o local na tela onde ela começará o jogo ''' class Cobra: def", "self.cobra.append([x, y]) ''' Método cresce, aumenta o comprimento da cobra ''' def cresce(self):", "= [x, y] self.comp = 1 self.cobra = [self.cabeca] self.direcao = \"\" '''", "ultrapassado é definido que não se está mais jogando porque perdeu e é", "definirá os elementos do objeto cobra, como cabeça, comprimento e direção, bem como", "jogo é redefinido e se retorna para o método iniciar ''' for event", "Classe cobra definirá os elementos do objeto cobra, como cabeça, comprimento e direção,", "0 self.pontos = 0 ''' Limpa a tela ''' fundo.fill(branco) ''' Desenha \"<NAME>\"", "Método reposicionar, define novos x e y aleatórios para a maçã após ser", "in self.cobra: if indice == len(self.cobra) - 1: pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho,", "os valores da cobra para os valores iniciais, para caso depois de ter", "self.cobra.mostrar() ''' Desenha o placar e o texto contendo a pontuação atual '''", "pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170, 365, 47]) textoContinuar", "na tela ''' self.maca.mostrar() ''' Atualiza toda a tela com todos os elementos", "= (0, 0, 0) vermelho = (255, 0, 0) verde = (0, 200,", "if 193 < mouse_x < 193 + 279 and 268 < mouse_y <", "1 ''' Método rastro, remove a cauda quando o tamanho do array é", "and 168 < mouse_y < 168 + 51: self.jogando = True self.perdeu =", 
"pedaço da cobra, recebe as coordenadas x e y como parâmetro, que será", "pygame.init() print(\"O modulo pygame foi inicializado com sucesso\") except: print(\"O modulo pygame não", "(192, 192, 192) laranja = (255, 69, 0) cinza = (79, 79, 79)", "and 268 < mouse_y < 268 + 58: self.jogando = True self.perdeu =", "botão de continuar jogando ''' pygame.draw.rect(fundo, prata, [183, 268, 279, 51]) pygame.draw.rect(fundo, preto,", "jogo ''' relogio = pygame.time.Clock() fundo = pygame.display.set_mode((largura, altura)) pygame.display.set_caption(\"Snake Game\") ''' Classe", "msg, cor, tam): self.font = pygame.font.SysFont(None, tam) self.texto = self.font.render(msg, True, cor) '''", "reinicia, redefine todos os valores da cobra para os valores iniciais, para caso", "randrange que usaremos para gerar numeros aleatórios para as posições da cobra e", "altura - tamanho - placar, 20) self.velocidade_x = 0 self.velocidade_y = 0 self.pontos", "- tamanho - placar, 20) ''' Método mostrar, desenha a maçã na tela", "self.perdeu = False self.modo = \"livre\" self.iniciar() ''' Limpa a tela ''' fundo.fill(branco)", "textoPlacarSombra = Texto(\"Pontuação:\" + str(self.pontos), cinza, 25) textoPlacarSombra.mostrar(9, altura - 31) textoPlacar =", "if event.type == pygame.QUIT: self.noMenu = False if event.type == pygame.KEYDOWN: if event.key", "+ 51: self.jogando = True self.perdeu = False self.noMenu = False self.modo =", "que acontece ao perder, podendo o jogador voltar a jogar ou sair do", "preto self.pos_x = randrange(0, largura - tamanho, 20) self.pos_y = randrange(0, altura -", "loop ''' fundo.fill(self.fundo) ''' Checa para qual direção a cobra está seguindo e", "e insere a nova cabeça no array das posições ''' def move(self, x,", "na tela ''' class Maca: def __init__(self): self.x = randrange(0, largura - tamanho,", "79, 79) cinzaClaro = (220, 220, 220) ''' Definição de configurações do jogo,", "Menu\", branco, 70) textoContinuar.mostrar(150, 173) ''' Desenha o botão de 
continuar jogando '''", "y, que serão as novas coordenadas da cabeça e insere a nova cabeça", "da cobra e maçã ''' import pygame import pygame.locals from random import randrange", "posições da cobra e maçã ''' import pygame import pygame.locals from random import", "maçã é reposicionada, a cobra aumenta e o placar de pontos aumenta '''", "checado se a cobra ultrapassou alguma das bordas, caso tenha ultrapassado é definido", "de seleção ''' pygame.draw.rect(fundo, prata, [143, 168, 369, 51]) pygame.draw.rect(fundo, preto, [145, 170,", "275, 47]) textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) ''' Atualiza a", "== \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao == \"direita\": self.pos_x += tamanho else:", "da tela de derrota, faz tudo que acontece ao perder, podendo o jogador", "= True self.perdido() if self.pos_y + tamanho > altura - placar: self.jogando =", "pygame.QUIT: self.jogando = False self.perdeu = False if event.type == pygame.KEYDOWN: if event.key", "maçã ''' import pygame import pygame.locals from random import randrange print(\"Módulos importados com", "[185, 270, 275, 47]) textoContinuar = Texto(\"Modo Livre\", branco, 70) textoContinuar.mostrar(190, 273) '''", "< 0: self.pos_x = largura - tamanho if self.pos_y + tamanho > altura", "True self.noMenu = False self.perdeu = False self.modo = \"livre\" self.iniciar() ''' Limpa", "aleatórios para as posições da cobra e maçã ''' import pygame import pygame.locals", "self.pos_y += tamanho elif self.cobra.direcao == \"esquerda\": self.pos_x -= tamanho elif self.cobra.direcao ==" ]
[ "'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', #", "): ') if confirm == 'True': del self.accountDict[email] return 'ActionSuccessful' if confirm ==", "line() continue password = input('請輸入密碼: ') if not password: print(output('PasswordNull')) line() continue if", "account', 'save money', 'take money'): email = input('請輸入E-mail: ') if not email: print(output('EmailNull'))", "= int(amount) if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful'", "# Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者:", "'NameNull' self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self,", "return check if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return", "'ActionSuccessful' if confirm == 'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password,", "'[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error]", "createAccount(self, email, password, name): if email in self.accountDict: return 'EmailUsed' if not name:", "password) if check: return check if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] <", "Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line():", "'leave': print('程式關閉') tragger = False if action in ('create account', 'delete account', 'save", "password) if check: return check if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount", "not password: print(output('PasswordNull')) line() continue if action in ('save money', 'take money'): 
amount", "Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful':", "未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough':", "('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any key to continue .", "sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款", "out = Bank.deleteAccount(email, password) elif action == 'save money': out = Bank.saveMoney(email, password,", "out = Bank.takeMoney(email, password, amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額:", "'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError':", "BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款", "'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any key to continue . 
.", "+= amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password, amount): check =", "check if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit']", "self.accountDict = {} def inputCheck(self, email, password): if email not in self.accountDict: return", "'[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error]", "'delete account', 'save money', 'take money'): email = input('請輸入E-mail: ') if not email:", "bank() def output(string:str): stringDict = { # Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error]", "input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name) elif action == 'delete account': out", "class bank: def __init__(self): self.accountDict = {} def inputCheck(self, email, password): if email", "if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError'", "self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError' return None def createAccount(self,", "return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password): check = Bank.inputCheck(email, password) if", "password): check = Bank.inputCheck(email, password) if check: return check return self.accountDict[email]['deposit'] Bank =", "== 'create account': name = input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name) elif", "email, password, amount): check = Bank.inputCheck(email, password) if check: return check if amount.isdigit():", "self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password): check =", "if not email: print(output('EmailNull')) line() continue password = input('請輸入密碼: ') if not 
password:", "amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email,", "if confirm == 'True': del self.accountDict[email] return 'ActionSuccessful' if confirm == 'False': return", "Bank.deleteAccount(email, password) elif action == 'save money': out = Bank.saveMoney(email, password, amount) elif", "'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password): check", "'[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error]", "'EmailUsed' if not name: return 'NameNull' self.accountDict[email] = {'name': name, 'password': password, 'deposit':", "if action == 'leave': print('程式關閉') tragger = False if action in ('create account',", "action == 'delete account': out = Bank.deleteAccount(email, password) elif action == 'save money':", "action == 'take money': out = Bank.takeMoney(email, password, amount) print(output(out)) if out in", "YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶", "'本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete", "'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line(): print('='*100)", "email, password, name): if email in self.accountDict: return 'EmailUsed' if not name: return", "<filename>main.py class bank: def __init__(self): self.accountDict = {} def inputCheck(self, email, password): if", "amount: print(output('AmountNull')) line() continue if action == 'create account': name = input('請輸入使用者名稱: ')", "amount): check = Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount =", "password) elif action == 'save money': out 
= Bank.saveMoney(email, password, amount) elif action", "action not in ('create account', 'delete account', 'save money', 'take money', 'leave'): print('[Error]", "email, password): if email not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password:", "Bank.saveMoney(email, password, amount) elif action == 'take money': out = Bank.takeMoney(email, password, amount)", "if action == 'create account': name = input('請輸入使用者名稱: ') out = Bank.createAccount(email, password,", "money': out = Bank.takeMoney(email, password, amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'):", "amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount", "line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款 請輸入\"take", "sep='\\n') line() tragger = True while tragger: action = input('輸入您要進行的操作: ') if not", "return 'AmountEnterError' def takeMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check:", "money': out = Bank.saveMoney(email, password, amount) elif action == 'take money': out =", "continue if action == 'create account': name = input('請輸入使用者名稱: ') out = Bank.createAccount(email,", "action = input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱') line() continue if action", "'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password, amount): check = Bank.inputCheck(email,", "if confirm == 'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password, amount):", "check if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError'", "-= amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password): check = Bank.inputCheck(email,", "not 
email: print(output('EmailNull')) line() continue password = input('請輸入密碼: ') if not password: print(output('PasswordNull'))", "check = Bank.inputCheck(email, password) if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ):", "Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功'", "None def createAccount(self, email, password, name): if email in self.accountDict: return 'EmailUsed' if", "Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT", "= Bank.inputCheck(email, password) if check: return check return self.accountDict[email]['deposit'] Bank = bank() def", "name: return 'NameNull' self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0} return 'CreateSuccessful'", "'帳戶創建成功' } return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github:", "'刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line()", "'取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True while tragger: action", "Bank.createAccount(email, password, name) elif action == 'delete account': out = Bank.deleteAccount(email, password) elif", "print(output('PasswordNull')) line() continue if action in ('save money', 'take money'): amount = input('請輸入金額:", "Bank.inputCheck(email, password) if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if", "'操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount", "output(string:str): stringDict = { # Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull':", "print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 
'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any", "= Bank.inputCheck(email, password) if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ')", "3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save", "# Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def", "email = input('請輸入E-mail: ') if not email: print(output('EmailNull')) line() continue password = input('請輸入密碼:", "Bank.inputCheck(email, password) if check: return check return self.accountDict[email]['deposit'] Bank = bank() def output(string:str):", "return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm == 'True': del", "'取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', # Account", "if check: return check return self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict =", "'存款 請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True", "金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string]", "= int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password,", "not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError' return None", "password, amount): check = Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount", "confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm == 'True': del self.accountDict[email] return", "== 'delete account': out = Bank.deleteAccount(email, password) elif action == 'save money': out", "if check: return check if 
amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount return", "請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail',", "'CreateSuccessful' def deleteAccount(self, email, password): check = Bank.inputCheck(email, password) if check: return check", "action in ('create account', 'delete account', 'save money', 'take money'): email = input('請輸入E-mail:", "account': name = input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name) elif action ==", "self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email,", "password, amount) elif action == 'take money': out = Bank.takeMoney(email, password, amount) print(output(out))", "account', 'delete account', 'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if", "tragger = False if action in ('create account', 'delete account', 'save money', 'take", "('create account', 'delete account', 'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue", "'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed':", "password, name): if email in self.accountDict: return 'EmailUsed' if not name: return 'NameNull'", "saveMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check: return check if", "Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create", "if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm ==", "print('程式關閉') tragger = False if action in ('create account', 'delete account', 'save money',", "'[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful':", "{ 
# Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull':", "amount) elif action == 'take money': out = Bank.takeMoney(email, password, amount) print(output(out)) if", "= Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit']", "'take money': out = Bank.takeMoney(email, password, amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful',", "('save money', 'take money'): amount = input('請輸入金額: ') if not amount: print(output('AmountNull')) line()", "def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND", "not in ('create account', 'delete account', 'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作')", "not action: print('[Error] 請輸入操作名稱') line() continue if action not in ('create account', 'delete", "return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password):", "= Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount = int(amount) if", "'離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True while tragger: action = input('輸入您要進行的操作: ')", "input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱') line() continue if action not in", "'[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return", "check: return check return self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict = {", "'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if", "return 'UnknowAction' def saveMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check:", "== 'save money': out = Bank.saveMoney(email, password, amount) 
elif action == 'take money':", "密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶',", "in ('create account', 'delete account', 'save money', 'take money'): email = input('請輸入E-mail: ')", "# Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful':", "return self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict = { # Null 'EmailNull':", "if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError' def", "def __init__(self): self.accountDict = {} def inputCheck(self, email, password): if email not in", "請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error", "0} return 'CreateSuccessful' def deleteAccount(self, email, password): check = Bank.inputCheck(email, password) if check:", "'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功',", "amount = int(amount) if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return", "'save money', 'take money'): email = input('請輸入E-mail: ') if not email: print(output('EmailNull')) line()", "= input('請輸入E-mail: ') if not email: print(output('EmailNull')) line() continue password = input('請輸入密碼: ')", "out = Bank.saveMoney(email, password, amount) elif action == 'take money': out = Bank.takeMoney(email,", "continue if action in ('save money', 'take money'): amount = input('請輸入金額: ') if", "amount = int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email,", "return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password, amount): check = Bank.inputCheck(email, 
password)", "password, name) elif action == 'delete account': out = Bank.deleteAccount(email, password) elif action", "= input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name) elif action == 'delete account':", "!= password: return 'PasswrodError' return None def createAccount(self, email, password, name): if email", "def deleteAccount(self, email, password): check = Bank.inputCheck(email, password) if check: return check confirm", "self.accountDict: return 'EmailUsed' if not name: return 'NameNull' self.accountDict[email] = {'name': name, 'password':", "del self.accountDict[email] return 'ActionSuccessful' if confirm == 'False': return 'ActionCancel' return 'UnknowAction' def", "'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', #", "check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm == 'True': del self.accountDict[email]", "存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作',", "elif action == 'delete account': out = Bank.deleteAccount(email, password) elif action == 'save", "def takeMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check: return check", "money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True while tragger: action = input('輸入您要進行的操作:", "請輸入操作名稱') line() continue if action not in ('create account', 'delete account', 'save money',", "https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶", "int(amount) if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return", "def saveMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check: return check", "money\"', '取款 
請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True while tragger:", "not name: return 'NameNull' self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0} return", "'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email, password): check = Bank.inputCheck(email, password) if", "return 'ActionSuccessful' if confirm == 'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email,", "'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作',", "self.accountDict[email]['password'] != password: return 'PasswrodError' return None def createAccount(self, email, password, name): if", "{'name': name, 'password': password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email, password): check", "continue if action == 'leave': print('程式關閉') tragger = False if action in ('create", "'[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit", "money'): email = input('請輸入E-mail: ') if not email: print(output('EmailNull')) line() continue password =", "'True': del self.accountDict[email] return 'ActionSuccessful' if confirm == 'False': return 'ActionCancel' return 'UnknowAction'", "amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password, amount): check = Bank.inputCheck(email,", "in ('save money', 'take money'): amount = input('請輸入金額: ') if not amount: print(output('AmountNull'))", "line() continue if action == 'create account': name = input('請輸入使用者名稱: ') out =", "'[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error]", "print('[Error] 請輸入正確的操作') line() continue if action == 'leave': print('程式關閉') tragger = False if", "account': out = Bank.deleteAccount(email, password) elif action == 'save money': out = Bank.saveMoney(email,", "'[Error] 
E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction':", "'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if", "'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', #", "return 'EmailUsed' if not name: return 'NameNull' self.accountDict[email] = {'name': name, 'password': password,", "Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] +=", "return check return self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict = { #", "== 'True': del self.accountDict[email] return 'ActionSuccessful' if confirm == 'False': return 'ActionCancel' return", "'[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line(): print('='*100) line()", "money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if action == 'leave': print('程式關閉')", "return 'AmountEnterError' def showDeposit(self, email, password): check = Bank.inputCheck(email, password) if check: return", "print(output('AmountNull')) line() continue if action == 'create account': name = input('請輸入使用者名稱: ') out", "= {'name': name, 'password': password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email, password):", "return 'PasswrodError' return None def createAccount(self, email, password, name): if email in self.accountDict:", "elif action == 'save money': out = Bank.saveMoney(email, password, amount) elif action ==", "'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error]", "def inputCheck(self, email, password): if email not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password']", "account\"', '存款 
請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger =", "line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n')", "password: print(output('PasswordNull')) line() continue if action in ('save money', 'take money'): amount =", "name, 'password': password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email, password): check =", "{} def inputCheck(self, email, password): if email not in self.accountDict: return 'EmailNotFound' if", "name) elif action == 'delete account': out = Bank.deleteAccount(email, password) elif action ==", "line() continue if action not in ('create account', 'delete account', 'save money', 'take", "'若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統", "'take money'): email = input('請輸入E-mail: ') if not email: print(output('EmailNull')) line() continue password", "'[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel':", "# Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', #", "'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if action == 'leave':", "') if not password: print(output('PasswordNull')) line() continue if action in ('save money', 'take", "') if not action: print('[Error] 請輸入操作名稱') line() continue if action not in ('create", "'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消',", "== 'leave': print('程式關閉') tragger = False if action in ('create account', 'delete account',", "'password': password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email, password): check = Bank.inputCheck(email,", "= 
input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm == 'True': del self.accountDict[email] return 'ActionSuccessful'", "showDeposit(self, email, password): check = Bank.inputCheck(email, password) if check: return check return self.accountDict[email]['deposit']", "money', 'take money'): amount = input('請輸入金額: ') if not amount: print(output('AmountNull')) line() continue", "} return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415',", "Bank = bank() def output(string:str): stringDict = { # Null 'EmailNull': '[Error] 請輸入E-mail',", "amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self,", "password, amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line()", "amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password): check = Bank.inputCheck(email, password)", "action == 'create account': name = input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name)", "= input('請輸入密碼: ') if not password: print(output('PasswordNull')) line() continue if action in ('save", "# Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error]", "password = input('請輸入密碼: ') if not password: print(output('PasswordNull')) line() continue if action in", "in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any key to continue", "# Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful':", "input('請輸入E-mail: ') if not email: print(output('EmailNull')) line() continue password = input('請輸入密碼: ') if", 
"'delete account', 'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if action", "takeMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check: return check if", "email, password): check = Bank.inputCheck(email, password) if check: return check return self.accountDict[email]['deposit'] Bank", "check: return check if amount.isdigit(): amount = int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful'", "int(amount) self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password, amount):", "請輸入\"leave\"', sep='\\n') line() tragger = True while tragger: action = input('輸入您要進行的操作: ') if", "if action in ('create account', 'delete account', 'save money', 'take money'): email =", "= input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱') line() continue if action not", "Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功',", "continue password = input('請輸入密碼: ') if not password: print(output('PasswordNull')) line() continue if action", "'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if action == 'leave': print('程式關閉') tragger", "請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True while tragger: action =", "= Bank.createAccount(email, password, name) elif action == 'delete account': out = Bank.deleteAccount(email, password)", "'create account': name = input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name) elif action", "action in ('save money', 'take money'): amount = input('請輸入金額: ') if not amount:", "out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any key to", "action == 'leave': print('程式關閉') tragger = False if action in ('create account', 'delete", "請輸入\"delete account\"', 
'存款 請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger", "stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC", "return None def createAccount(self, email, password, name): if email in self.accountDict: return 'EmailUsed'", "E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error]", "out = Bank.createAccount(email, password, name) elif action == 'delete account': out = Bank.deleteAccount(email,", "== 'take money': out = Bank.takeMoney(email, password, amount) print(output(out)) if out in ('SaveSuccessful',", "('create account', 'delete account', 'save money', 'take money'): email = input('請輸入E-mail: ') if", "if email not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError'", "amount = input('請輸入金額: ') if not amount: print(output('AmountNull')) line() continue if action ==", "name): if email in self.accountDict: return 'EmailUsed' if not name: return 'NameNull' self.accountDict[email]", "confirm == 'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password, amount): check", "'leave'): print('[Error] 請輸入正確的操作') line() continue if action == 'leave': print('程式關閉') tragger = False", "password, 'deposit': 0} return 'CreateSuccessful' def deleteAccount(self, email, password): check = Bank.inputCheck(email, password)", "deleteAccount(self, email, password): check = Bank.inputCheck(email, password) if check: return check confirm =", "') if confirm == 'True': del self.accountDict[email] return 'ActionSuccessful' if confirm == 'False':", "name = input('請輸入使用者名稱: ') out = Bank.createAccount(email, password, name) elif action == 'delete", "if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] < amount: return 
'DepositNotEnough' self.accountDict[email]['deposit'] -=", "請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤',", "email, password): check = Bank.inputCheck(email, password) if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}(", "input('請輸入密碼: ') if not password: print(output('PasswordNull')) line() continue if action in ('save money',", "return 'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError' return None def createAccount(self, email,", "password: return 'PasswrodError' return None def createAccount(self, email, password, name): if email in", "def showDeposit(self, email, password): check = Bank.inputCheck(email, password) if check: return check return", "< amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError' def showDeposit(self,", "請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', sep='\\n') line() tragger = True while", "= Bank.takeMoney(email, password, amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email,", "print(output('EmailNull')) line() continue password = input('請輸入密碼: ') if not password: print(output('PasswordNull')) line() continue", "'UnknowAction' def saveMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check: return", "if not action: print('[Error] 請輸入操作名稱') line() continue if action not in ('create account',", "'ActionCancel': '[Error] 操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError':", "line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0", "Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 
'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額',", "'有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!',", "'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")',", "'[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account", "') if not email: print(output('EmailNull')) line() continue password = input('請輸入密碼: ') if not", "check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm == 'True':", "'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound':", "continue if action not in ('create account', 'delete account', 'save money', 'take money',", "請輸入帳戶名稱', 'AmountNull': '[Error] 請輸入金額', # Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用',", "= input('請輸入金額: ') if not amount: print(output('AmountNull')) line() continue if action == 'create", "輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"',", "email in self.accountDict: return 'EmailUsed' if not name: return 'NameNull' self.accountDict[email] = {'name':", "check return self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict = { # Null", "email: print(output('EmailNull')) line() continue password = input('請輸入密碼: ') if not password: print(output('PasswordNull')) line()", "= True while tragger: action = input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱')", "print('[Error] 請輸入操作名稱') line() continue if action not in ('create account', 'delete account', 'save", "account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款 請輸入\"take money\"', '離開此系統 請輸入\"leave\"', 
sep='\\n')", "if action in ('save money', 'take money'): amount = input('請輸入金額: ') if not", "stringDict = { # Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error]", "password): check = Bank.inputCheck(email, password) if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False", "confirm == 'True': del self.accountDict[email] return 'ActionSuccessful' if confirm == 'False': return 'ActionCancel'", "line() tragger = True while tragger: action = input('輸入您要進行的操作: ') if not action:", "tragger = True while tragger: action = input('輸入您要進行的操作: ') if not action: print('[Error]", "Account Error 'EmailNotFound': '[Error] 資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action", "while tragger: action = input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱') line() continue", "Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit']", "money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if action == 'leave': print('程式關閉') tragger =", "if not password: print(output('PasswordNull')) line() continue if action in ('save money', 'take money'):", "'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any key to continue . . 
.')", "== 'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self, email, password, amount): check =", "'delete account': out = Bank.deleteAccount(email, password) elif action == 'save money': out =", "if check: return check if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] < amount:", "'TakeSuccessful' return 'AmountEnterError' def showDeposit(self, email, password): check = Bank.inputCheck(email, password) if check:", "= Bank.saveMoney(email, password, amount) elif action == 'take money': out = Bank.takeMoney(email, password,", "if self.accountDict[email]['password'] != password: return 'PasswrodError' return None def createAccount(self, email, password, name):", "self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict = { # Null 'EmailNull': '[Error]", "CC BY-NC-ND 3.0 TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"',", "self.accountDict[email] return 'ActionSuccessful' if confirm == 'False': return 'ActionCancel' return 'UnknowAction' def saveMoney(self,", "account', 'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line() continue if action ==", "= False if action in ('create account', 'delete account', 'save money', 'take money'):", "'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', #", "'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足', # Account 'CreateSuccessful': '帳戶創建成功' }", "money'): amount = input('請輸入金額: ') if not amount: print(output('AmountNull')) line() continue if action", "amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press", "資料庫內無此E-mail', 'EmailUsed': '[Error] E-mail已使用', 'PasswrodError': '[Error] 密碼輸入錯誤', # Action 'ActionSuccessful': '操作成功', 'ActionCancel': '[Error]", "def 
createAccount(self, email, password, name): if email in self.accountDict: return 'EmailUsed' if not", "'save money': out = Bank.saveMoney(email, password, amount) elif action == 'take money': out", "'成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤', # Deposit 'DepositNotEnough': '[Error] 存款不足',", "in ('create account', 'delete account', 'save money', 'take money', 'leave'): print('[Error] 請輸入正確的操作') line()", "return check if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough'", "請輸入正確的操作') line() continue if action == 'leave': print('程式關閉') tragger = False if action", "TW', sep='\\n') line() print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"',", "print('歡迎來到芒果銀行!!!', '若您是新用戶,請先創建帳戶 輸入\"create account\")', '刪除帳戶 請輸入\"delete account\"', '存款 請輸入\"save money\"', '取款 請輸入\"take money\"',", "') out = Bank.createAccount(email, password, name) elif action == 'delete account': out =", "return 'NameNull' self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0} return 'CreateSuccessful' def", "elif action == 'take money': out = Bank.takeMoney(email, password, amount) print(output(out)) if out", "__init__(self): self.accountDict = {} def inputCheck(self, email, password): if email not in self.accountDict:", "def output(string:str): stringDict = { # Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼',", "account', 'delete account', 'save money', 'take money'): email = input('請輸入E-mail: ') if not", "'AmountEnterError' def showDeposit(self, email, password): check = Bank.inputCheck(email, password) if check: return check", "money', 'take money'): email = input('請輸入E-mail: ') if not email: print(output('EmailNull')) line() continue", "'CreateSuccessful': '帳戶創建成功' } return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092',", "check = 
Bank.inputCheck(email, password) if check: return check return self.accountDict[email]['deposit'] Bank = bank()", "check = Bank.inputCheck(email, password) if check: return check if amount.isdigit(): amount = int(amount)", "return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password, amount): check = Bank.inputCheck(email, password)", "input('請輸入金額: ') if not amount: print(output('AmountNull')) line() continue if action == 'create account':", "if action not in ('create account', 'delete account', 'save money', 'take money', 'leave'):", "password): if email not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password: return", "'take money'): amount = input('請輸入金額: ') if not amount: print(output('AmountNull')) line() continue if", "'AmountEnterError' def takeMoney(self, email, password, amount): check = Bank.inputCheck(email, password) if check: return", "password) if check: return check confirm = input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm", "print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW',", "line() continue if action == 'leave': print('程式關閉') tragger = False if action in", "'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError' return None def createAccount(self, email, password,", "tragger: action = input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱') line() continue if", "return 'CreateSuccessful' def deleteAccount(self, email, password): check = Bank.inputCheck(email, password) if check: return", "not amount: print(output('AmountNull')) line() continue if action == 'create account': name = input('請輸入使用者名稱:", "True while tragger: action = input('輸入您要進行的操作: ') if not action: print('[Error] 請輸入操作名稱') line()", "True/False ): ') if confirm == 'True': del self.accountDict[email] return 'ActionSuccessful' if confirm", 
"print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW', sep='\\n') line()", "return stringDict[string] def line(): print('='*100) line() print('本系統由芒果凍布丁製作', '有問題請至Discord找本作者: YT Mango#4092', 'Github: https://github.com/EvanHsieh0415', '本著作使用創用CC授權", "False if action in ('create account', 'delete account', 'save money', 'take money'): email", "action == 'save money': out = Bank.saveMoney(email, password, amount) elif action == 'take", "inputCheck(self, email, password): if email not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] !=", "= bank() def output(string:str): stringDict = { # Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull':", "check: return check if amount.isdigit(): amount = int(amount) if self.accountDict[email]['deposit'] < amount: return", "line() continue if action in ('save money', 'take money'): amount = input('請輸入金額: ')", "password) if check: return check return self.accountDict[email]['deposit'] Bank = bank() def output(string:str): stringDict", "= { # Null 'EmailNull': '[Error] 請輸入E-mail', 'PasswordNull': '[Error] 請輸入密碼', 'NameNull': '[Error] 請輸入帳戶名稱',", "input(f'是否確定刪除{self.accountDict[email][\"name\"]}( True/False ): ') if confirm == 'True': del self.accountDict[email] return 'ActionSuccessful' if", "action: print('[Error] 請輸入操作名稱') line() continue if action not in ('create account', 'delete account',", "'PasswrodError' return None def createAccount(self, email, password, name): if email in self.accountDict: return", "bank: def __init__(self): self.accountDict = {} def inputCheck(self, email, password): if email not", "in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError' return None def", "if not name: return 'NameNull' self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0}", "if not amount: print(output('AmountNull')) line() continue if 
action == 'create account': name =", "self.accountDict[email]['deposit'] < amount: return 'DepositNotEnough' self.accountDict[email]['deposit'] -= amount return 'TakeSuccessful' return 'AmountEnterError' def", "in self.accountDict: return 'EmailUsed' if not name: return 'NameNull' self.accountDict[email] = {'name': name,", "= {} def inputCheck(self, email, password): if email not in self.accountDict: return 'EmailNotFound'", "操作取消', 'UnknowAction': '[Error] 未知的操作', 'SaveSuccessful': '成功存入銀行帳戶', 'TakeSuccessful': '取款成功', # Amount 'AmountEnterError': '[Error] 金額輸入錯誤',", "Bank.takeMoney(email, password, amount) print(output(out)) if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}')", "email not in self.accountDict: return 'EmailNotFound' if self.accountDict[email]['password'] != password: return 'PasswrodError' return", "self.accountDict[email]['deposit'] += amount return 'SaveSuccessful' return 'AmountEnterError' def takeMoney(self, email, password, amount): check", "if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'): print(f'存款餘額: {Bank.showDeposit(email, password)}') line() input('Press any key", "= Bank.deleteAccount(email, password) elif action == 'save money': out = Bank.saveMoney(email, password, amount)", "') if not amount: print(output('AmountNull')) line() continue if action == 'create account': name", "if email in self.accountDict: return 'EmailUsed' if not name: return 'NameNull' self.accountDict[email] =" ]
[ ".logger import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines", "as well as configuration for logging. Args: name (Optional[str]): The name of the", "description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from", "for logging. Args: name (Optional[str]): The name of the mode, defaults to 'default'.", ".resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a \"mode\" in which", "mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage options available when executing in", "key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() )", "of system storage options available when executing in this mode. Defaults to 'in_memory'", "and 'filesystem'. 
description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None,", "check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name", "def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name == name:", "'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name == name: return system_storage_def check.failed('{} storage", "self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage", "system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage options available when executing in this", "in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage options available when", "'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers()", "= check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers =", "check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs", "The set of loggers to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set", "The name of the mode, defaults to 'default'. 
resources (Optional[List[ResourceDefinition]]): The set of", "(Optional[List[SystemStorageDefinition]]): The set of system storage options available when executing in this mode.", "[mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name,", "from dagster.loggers import default_loggers from .logger import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME", "storage options available when executing in this mode. Defaults to 'in_memory' and 'filesystem'.", "system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description')", "as configuration for logging. Args: name (Optional[str]): The name of the mode, defaults", ") self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, )", "to 'default'. resources (Optional[List[ResourceDefinition]]): The set of resources for this mode. loggers (Optional[List[LoggerDefinition]]):", "implementations as well as configuration for logging. Args: name (Optional[str]): The name of", "executing in this mode. Defaults to 'in_memory' and 'filesystem'. description (Optional[str]) ''' def", "(Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage", "= check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description =", "the mode, defaults to 'default'. 
resources (Optional[List[ResourceDefinition]]): The set of resources for this", "of the mode, defaults to 'default'. resources (Optional[List[ResourceDefinition]]): The set of resources for", "'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for", "mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition", "set of resource implementations as well as configuration for logging. Args: name (Optional[str]):", "check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name == name: return system_storage_def check.failed('{}", "loggers (Optional[List[LoggerDefinition]]): The set of loggers to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]):", "which a pipeline can operate. A mode provides a set of resource implementations", "provides a set of resource implementations as well as configuration for logging. Args:", ".system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources,", "name (Optional[str]): The name of the mode, defaults to 'default'. resources (Optional[List[ResourceDefinition]]): The", "loggers=None, system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name,", "<reponame>atsuhiro/dagster<filename>python_modules/dagster/dagster/core/definitions/mode.py from dagster import check from dagster.loggers import default_loggers from .logger import LoggerDefinition", "ModeDefinition: '''Defines a \"mode\" in which a pipeline can operate. 
A mode provides", "(Optional[List[ResourceDefinition]]): The set of resources for this mode. loggers (Optional[List[LoggerDefinition]]): The set of", "'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else", "resources for this mode. loggers (Optional[List[LoggerDefinition]]): The set of loggers to use in", "'default'. resources (Optional[List[ResourceDefinition]]): The set of resources for this mode. loggers (Optional[List[LoggerDefinition]]): The", "get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name == name: return", "dagster import check from dagster.loggers import default_loggers from .logger import LoggerDefinition from .resource", "SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str,", "'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers,", "in self.system_storage_defs: if system_storage_def.name == name: return system_storage_def check.failed('{} storage definition not found'.format(name))", "defaults to 'default'. resources (Optional[List[ResourceDefinition]]): The set of resources for this mode. loggers", "__init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage,", "mode, defaults to 'default'. resources (Optional[List[ResourceDefinition]]): The set of resources for this mode.", "The set of system storage options available when executing in this mode. 
Defaults", "key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage,", "'in_memory' and 'filesystem'. description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None,", "Defaults to 'in_memory' and 'filesystem'. description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None,", "in this mode. Defaults to 'in_memory' and 'filesystem'. description (Optional[str]) ''' def __init__(", "import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a", "Args: name (Optional[str]): The name of the mode, defaults to 'default'. resources (Optional[List[ResourceDefinition]]):", "or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def',", "else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name):", "import check from dagster.loggers import default_loggers from .logger import LoggerDefinition from .resource import", "system_storage_def in self.system_storage_defs: if system_storage_def.name == name: return system_storage_def check.failed('{} storage definition not", "can operate. 
A mode provides a set of resource implementations as well as", "A mode provides a set of resource implementations as well as configuration for", "self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param(", "mode provides a set of resource implementations as well as configuration for logging.", "from dagster import check from dagster.loggers import default_loggers from .logger import LoggerDefinition from", "(Optional[List[LoggerDefinition]]): The set of loggers to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The", "system storage options available when executing in this mode. Defaults to 'in_memory' and", "self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers',", "description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs", "this mode. Defaults to 'in_memory' and 'filesystem'. description (Optional[str]) ''' def __init__( self,", "a pipeline can operate. A mode provides a set of resource implementations as", "a \"mode\" in which a pipeline can operate. A mode provides a set", "check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = (", "= ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs", "check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition)", "well as configuration for logging. 
Args: name (Optional[str]): The name of the mode,", "resource implementations as well as configuration for logging. Args: name (Optional[str]): The name", "(Optional[str]): The name of the mode, defaults to 'default'. resources (Optional[List[ResourceDefinition]]): The set", "= 'default' class ModeDefinition: '''Defines a \"mode\" in which a pipeline can operate.", "self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description", "set of loggers to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of", "of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def", "use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage options available", "configuration for logging. Args: name (Optional[str]): The name of the mode, defaults to", "value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs", "of resource implementations as well as configuration for logging. Args: name (Optional[str]): The", "''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage import", "resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name =", "value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage],", "set of system storage options available when executing in this mode. 
Defaults to", "check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description,", "mode. loggers (Optional[List[LoggerDefinition]]): The set of loggers to use in this mode. system_storage_defs", "): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs =", "= check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str,", "fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name')", "loggers to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage", "mode. Defaults to 'in_memory' and 'filesystem'. description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME,", "this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage options available when executing", "of resources for this mode. loggers (Optional[List[LoggerDefinition]]): The set of loggers to use", ") self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in", "system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name')", "from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param(", "operate. 
A mode provides a set of resource implementations as well as configuration", "resources (Optional[List[ResourceDefinition]]): The set of resources for this mode. loggers (Optional[List[LoggerDefinition]]): The set", "import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources',", "options available when executing in this mode. Defaults to 'in_memory' and 'filesystem'. description", "default_loggers from .logger import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class", "if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def", "system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition, ) self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self,", "from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a \"mode\" in", "pipeline can operate. A mode provides a set of resource implementations as well", "import default_loggers from .logger import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default'", "logging. Args: name (Optional[str]): The name of the mode, defaults to 'default'. resources", "LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a \"mode\"", "'filesystem'. 
description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ):", "default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage], 'system_storage_def', of_type=SystemStorageDefinition,", "of loggers to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system", "for system_storage_def in self.system_storage_defs: if system_storage_def.name == name: return system_storage_def check.failed('{} storage definition", "import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a \"mode\" in which a", "self.description = check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs:", "name of the mode, defaults to 'default'. resources (Optional[List[ResourceDefinition]]): The set of resources", "\"mode\" in which a pipeline can operate. A mode provides a set of", "check from dagster.loggers import default_loggers from .logger import LoggerDefinition from .resource import ResourceDefinition", "in which a pipeline can operate. A mode provides a set of resource", "resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or", "available when executing in this mode. Defaults to 'in_memory' and 'filesystem'. description (Optional[str])", "for this mode. 
loggers (Optional[List[LoggerDefinition]]): The set of loggers to use in this", "ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a \"mode\" in which a pipeline", ") self.loggers = ( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs =", "def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition,", "( check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition) or default_loggers() ) self.system_storage_defs = check.list_param( system_storage_defs if", "'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name ==", "self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition ) self.loggers", "to 'in_memory' and 'filesystem'. description (Optional[str]) ''' def __init__( self, name=DEFAULT_MODE_NAME, resources=None, loggers=None,", "name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if system_storage_def.name == name: return system_storage_def", "set of resources for this mode. loggers (Optional[List[LoggerDefinition]]): The set of loggers to", "name=DEFAULT_MODE_NAME, resources=None, loggers=None, system_storage_defs=None, description=None, ): from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage self.name", "from .logger import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME = 'default' class ModeDefinition:", "fs_system_storage self.name = check.str_param(name, 'name') self.resource_defs = check.opt_dict_param( resources, 'resources', key_type=str, value_type=ResourceDefinition )", "The set of resources for this mode. 
loggers (Optional[List[LoggerDefinition]]): The set of loggers", "'''Defines a \"mode\" in which a pipeline can operate. A mode provides a", "when executing in this mode. Defaults to 'in_memory' and 'filesystem'. description (Optional[str]) '''", "DEFAULT_MODE_NAME = 'default' class ModeDefinition: '''Defines a \"mode\" in which a pipeline can", "this mode. loggers (Optional[List[LoggerDefinition]]): The set of loggers to use in this mode.", "to use in this mode. system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage options", "dagster.loggers import default_loggers from .logger import LoggerDefinition from .resource import ResourceDefinition DEFAULT_MODE_NAME =", "a set of resource implementations as well as configuration for logging. Args: name", "= check.opt_str_param(description, 'description') def get_system_storage_def(self, name): check.str_param(name, 'name') for system_storage_def in self.system_storage_defs: if", "class ModeDefinition: '''Defines a \"mode\" in which a pipeline can operate. A mode", "'default' class ModeDefinition: '''Defines a \"mode\" in which a pipeline can operate. A" ]
[ "with open('links.json', 'w') as f: encoded = json.dumps(decoded, indent=2) f.write(encoded) if __name__ ==", "# Have the programme connect to a site and pulls out all the", "https://') pageScraper = PageScraper(url) links = [i for i in pageScraper.links()] images =", "images }) with open('links.json', 'w') as f: encoded = json.dumps(decoded, indent=2) f.write(encoded) if", "yield link.attr['href'] def images(self): for image in self.dom.find('img'): yield image.attr['src'] def main(): url", "dom def links(self): for link in self.dom.find('a'): yield link.attr['href'] def images(self): for image", "save them to a list. class PageScraper: def __init__(self, url): self.url = url", "to a list. class PageScraper: def __init__(self, url): self.url = url self.parser =", "requests.get(self.url) html = req.text dom = self.parser.feed(html) return dom def links(self): for link", "'r') as f: encoded = f.read() decoded = json.loads(encoded) if len(encoded) else []", "len(encoded) else [] for i in decoded: if i['site'] == url: return decoded.append({", "input('Enter a URL: https://') pageScraper = PageScraper(url) links = [i for i in", "ehp # Page Scraper # Have the programme connect to a site and", "= ehp.Html() self.dom = self.__dom() def __dom(self): req = requests.get(self.url) html = req.text", "pageScraper.images()] with open('links.json', 'r') as f: encoded = f.read() decoded = json.loads(encoded) if", "url = 'https://' + input('Enter a URL: https://') pageScraper = PageScraper(url) links =", "import requests import json import ehp # Page Scraper # Have the programme", "out all the links, or images, and save them to a list. 
class", "for i in decoded: if i['site'] == url: return decoded.append({ 'site': url, 'links':", "decoded.append({ 'site': url, 'links': links, 'images': images }) with open('links.json', 'w') as f:", "__dom(self): req = requests.get(self.url) html = req.text dom = self.parser.feed(html) return dom def", "in pageScraper.images()] with open('links.json', 'r') as f: encoded = f.read() decoded = json.loads(encoded)", "url): self.url = url self.parser = ehp.Html() self.dom = self.__dom() def __dom(self): req", "yield image.attr['src'] def main(): url = 'https://' + input('Enter a URL: https://') pageScraper", "class PageScraper: def __init__(self, url): self.url = url self.parser = ehp.Html() self.dom =", "links(self): for link in self.dom.find('a'): yield link.attr['href'] def images(self): for image in self.dom.find('img'):", "to a site and pulls out all the links, or images, and save", "= 'https://' + input('Enter a URL: https://') pageScraper = PageScraper(url) links = [i", "Page Scraper # Have the programme connect to a site and pulls out", "f.read() decoded = json.loads(encoded) if len(encoded) else [] for i in decoded: if", "'links': links, 'images': images }) with open('links.json', 'w') as f: encoded = json.dumps(decoded,", "self.parser = ehp.Html() self.dom = self.__dom() def __dom(self): req = requests.get(self.url) html =", "'w') as f: encoded = json.dumps(decoded, indent=2) f.write(encoded) if __name__ == '__main__': main()", "requests import json import ehp # Page Scraper # Have the programme connect", "a list. 
class PageScraper: def __init__(self, url): self.url = url self.parser = ehp.Html()", "= self.__dom() def __dom(self): req = requests.get(self.url) html = req.text dom = self.parser.feed(html)", "self.dom = self.__dom() def __dom(self): req = requests.get(self.url) html = req.text dom =", "[i for i in pageScraper.images()] with open('links.json', 'r') as f: encoded = f.read()", "connect to a site and pulls out all the links, or images, and", "json import ehp # Page Scraper # Have the programme connect to a", "= requests.get(self.url) html = req.text dom = self.parser.feed(html) return dom def links(self): for", "programme connect to a site and pulls out all the links, or images,", "import ehp # Page Scraper # Have the programme connect to a site", "pulls out all the links, or images, and save them to a list.", "'images': images }) with open('links.json', 'w') as f: encoded = json.dumps(decoded, indent=2) f.write(encoded)", "and save them to a list. class PageScraper: def __init__(self, url): self.url =", "as f: encoded = f.read() decoded = json.loads(encoded) if len(encoded) else [] for", "= url self.parser = ehp.Html() self.dom = self.__dom() def __dom(self): req = requests.get(self.url)", "list. 
class PageScraper: def __init__(self, url): self.url = url self.parser = ehp.Html() self.dom", "req.text dom = self.parser.feed(html) return dom def links(self): for link in self.dom.find('a'): yield", "req = requests.get(self.url) html = req.text dom = self.parser.feed(html) return dom def links(self):", "def images(self): for image in self.dom.find('img'): yield image.attr['src'] def main(): url = 'https://'", "__init__(self, url): self.url = url self.parser = ehp.Html() self.dom = self.__dom() def __dom(self):", "decoded = json.loads(encoded) if len(encoded) else [] for i in decoded: if i['site']", "== url: return decoded.append({ 'site': url, 'links': links, 'images': images }) with open('links.json',", "PageScraper(url) links = [i for i in pageScraper.links()] images = [i for i", "= req.text dom = self.parser.feed(html) return dom def links(self): for link in self.dom.find('a'):", "url self.parser = ehp.Html() self.dom = self.__dom() def __dom(self): req = requests.get(self.url) html", "self.url = url self.parser = ehp.Html() self.dom = self.__dom() def __dom(self): req =", "= self.parser.feed(html) return dom def links(self): for link in self.dom.find('a'): yield link.attr['href'] def", "main(): url = 'https://' + input('Enter a URL: https://') pageScraper = PageScraper(url) links", "images, and save them to a list. 
class PageScraper: def __init__(self, url): self.url", "links = [i for i in pageScraper.links()] images = [i for i in", "Scraper # Have the programme connect to a site and pulls out all", "def links(self): for link in self.dom.find('a'): yield link.attr['href'] def images(self): for image in", "+ input('Enter a URL: https://') pageScraper = PageScraper(url) links = [i for i", "URL: https://') pageScraper = PageScraper(url) links = [i for i in pageScraper.links()] images", "= f.read() decoded = json.loads(encoded) if len(encoded) else [] for i in decoded:", "in self.dom.find('a'): yield link.attr['href'] def images(self): for image in self.dom.find('img'): yield image.attr['src'] def", "and pulls out all the links, or images, and save them to a", "Have the programme connect to a site and pulls out all the links,", "open('links.json', 'r') as f: encoded = f.read() decoded = json.loads(encoded) if len(encoded) else", "'https://' + input('Enter a URL: https://') pageScraper = PageScraper(url) links = [i for", "images(self): for image in self.dom.find('img'): yield image.attr['src'] def main(): url = 'https://' +", "i['site'] == url: return decoded.append({ 'site': url, 'links': links, 'images': images }) with", "def main(): url = 'https://' + input('Enter a URL: https://') pageScraper = PageScraper(url)", "else [] for i in decoded: if i['site'] == url: return decoded.append({ 'site':", "or images, and save them to a list. 
class PageScraper: def __init__(self, url):", "self.parser.feed(html) return dom def links(self): for link in self.dom.find('a'): yield link.attr['href'] def images(self):", "self.dom.find('a'): yield link.attr['href'] def images(self): for image in self.dom.find('img'): yield image.attr['src'] def main():", "if i['site'] == url: return decoded.append({ 'site': url, 'links': links, 'images': images })", "PageScraper: def __init__(self, url): self.url = url self.parser = ehp.Html() self.dom = self.__dom()", "the links, or images, and save them to a list. class PageScraper: def", "site and pulls out all the links, or images, and save them to", "them to a list. class PageScraper: def __init__(self, url): self.url = url self.parser", "'site': url, 'links': links, 'images': images }) with open('links.json', 'w') as f: encoded", "image.attr['src'] def main(): url = 'https://' + input('Enter a URL: https://') pageScraper =", "all the links, or images, and save them to a list. class PageScraper:", "links, or images, and save them to a list. 
class PageScraper: def __init__(self,", "[i for i in pageScraper.links()] images = [i for i in pageScraper.images()] with", "import json import ehp # Page Scraper # Have the programme connect to", "link in self.dom.find('a'): yield link.attr['href'] def images(self): for image in self.dom.find('img'): yield image.attr['src']", "self.__dom() def __dom(self): req = requests.get(self.url) html = req.text dom = self.parser.feed(html) return", "}) with open('links.json', 'w') as f: encoded = json.dumps(decoded, indent=2) f.write(encoded) if __name__", "the programme connect to a site and pulls out all the links, or", "= PageScraper(url) links = [i for i in pageScraper.links()] images = [i for", "json.loads(encoded) if len(encoded) else [] for i in decoded: if i['site'] == url:", "def __dom(self): req = requests.get(self.url) html = req.text dom = self.parser.feed(html) return dom", "i in decoded: if i['site'] == url: return decoded.append({ 'site': url, 'links': links,", "return decoded.append({ 'site': url, 'links': links, 'images': images }) with open('links.json', 'w') as", "in pageScraper.links()] images = [i for i in pageScraper.images()] with open('links.json', 'r') as", "html = req.text dom = self.parser.feed(html) return dom def links(self): for link in", "ehp.Html() self.dom = self.__dom() def __dom(self): req = requests.get(self.url) html = req.text dom", "with open('links.json', 'r') as f: encoded = f.read() decoded = json.loads(encoded) if len(encoded)", "image in self.dom.find('img'): yield image.attr['src'] def main(): url = 'https://' + input('Enter a", "encoded = f.read() decoded = json.loads(encoded) if len(encoded) else [] for i in", "def __init__(self, url): self.url = url self.parser = ehp.Html() self.dom = self.__dom() def", "# Page Scraper # Have the programme connect to a site and pulls", "i in pageScraper.images()] with open('links.json', 'r') as f: encoded = f.read() decoded =", "= [i for i in pageScraper.links()] images = [i for i in 
pageScraper.images()]", "pageScraper = PageScraper(url) links = [i for i in pageScraper.links()] images = [i", "= json.loads(encoded) if len(encoded) else [] for i in decoded: if i['site'] ==", "[] for i in decoded: if i['site'] == url: return decoded.append({ 'site': url,", "links, 'images': images }) with open('links.json', 'w') as f: encoded = json.dumps(decoded, indent=2)", "for i in pageScraper.links()] images = [i for i in pageScraper.images()] with open('links.json',", "in decoded: if i['site'] == url: return decoded.append({ 'site': url, 'links': links, 'images':", "self.dom.find('img'): yield image.attr['src'] def main(): url = 'https://' + input('Enter a URL: https://')", "url, 'links': links, 'images': images }) with open('links.json', 'w') as f: encoded =", "pageScraper.links()] images = [i for i in pageScraper.images()] with open('links.json', 'r') as f:", "for image in self.dom.find('img'): yield image.attr['src'] def main(): url = 'https://' + input('Enter", "if len(encoded) else [] for i in decoded: if i['site'] == url: return", "dom = self.parser.feed(html) return dom def links(self): for link in self.dom.find('a'): yield link.attr['href']", "images = [i for i in pageScraper.images()] with open('links.json', 'r') as f: encoded", "open('links.json', 'w') as f: encoded = json.dumps(decoded, indent=2) f.write(encoded) if __name__ == '__main__':", "f: encoded = f.read() decoded = json.loads(encoded) if len(encoded) else [] for i", "decoded: if i['site'] == url: return decoded.append({ 'site': url, 'links': links, 'images': images", "for i in pageScraper.images()] with open('links.json', 'r') as f: encoded = f.read() decoded", "a site and pulls out all the links, or images, and save them", "link.attr['href'] def images(self): for image in self.dom.find('img'): yield image.attr['src'] def main(): url =", "url: return decoded.append({ 'site': url, 'links': links, 'images': images }) with open('links.json', 'w')", "i in pageScraper.links()] images = [i for 
i in pageScraper.images()] with open('links.json', 'r')", "= [i for i in pageScraper.images()] with open('links.json', 'r') as f: encoded =", "in self.dom.find('img'): yield image.attr['src'] def main(): url = 'https://' + input('Enter a URL:", "for link in self.dom.find('a'): yield link.attr['href'] def images(self): for image in self.dom.find('img'): yield", "a URL: https://') pageScraper = PageScraper(url) links = [i for i in pageScraper.links()]", "return dom def links(self): for link in self.dom.find('a'): yield link.attr['href'] def images(self): for" ]
[ "import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import * from fnss.adapters.jfed import", "exporting and importing FNSS data structures (topologies, event schedules and traffic matrices) to/from", "event schedules and traffic matrices) to/from other simulators or emulators \"\"\" from fnss.adapters.autonetkit", "for exporting and importing FNSS data structures (topologies, event schedules and traffic matrices)", "(topologies, event schedules and traffic matrices) to/from other simulators or emulators \"\"\" from", "* from fnss.adapters.mn import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import *", "importing FNSS data structures (topologies, event schedules and traffic matrices) to/from other simulators", "import * from fnss.adapters.mn import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import", "schedules and traffic matrices) to/from other simulators or emulators \"\"\" from fnss.adapters.autonetkit import", "to/from other simulators or emulators \"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn import", "structures (topologies, event schedules and traffic matrices) to/from other simulators or emulators \"\"\"", "from fnss.adapters.autonetkit import * from fnss.adapters.mn import * from fnss.adapters.ns2 import * from", "simulators or emulators \"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn import * from", "fnss.adapters.mn import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import * from fnss.adapters.jfed", "FNSS data structures (topologies, event schedules and traffic matrices) to/from other simulators or", "traffic matrices) to/from other simulators or emulators \"\"\" from fnss.adapters.autonetkit import * from", "and traffic matrices) to/from other simulators or emulators \"\"\" from fnss.adapters.autonetkit import *", "other simulators or emulators \"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn import *", "\"\"\"Tools for 
exporting and importing FNSS data structures (topologies, event schedules and traffic", "from fnss.adapters.mn import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import * from", "* from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import * from fnss.adapters.jfed import *", "or emulators \"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn import * from fnss.adapters.ns2", "fnss.adapters.autonetkit import * from fnss.adapters.mn import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp", "\"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn import * from fnss.adapters.ns2 import *", "matrices) to/from other simulators or emulators \"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn", "emulators \"\"\" from fnss.adapters.autonetkit import * from fnss.adapters.mn import * from fnss.adapters.ns2 import", "and importing FNSS data structures (topologies, event schedules and traffic matrices) to/from other", "data structures (topologies, event schedules and traffic matrices) to/from other simulators or emulators" ]
[ "Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async def async_main(): #", "sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the results are buffered so no", "# this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select()) # this object", "Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async def async_main(): # engine is an", "a traditional \"await execute()\" # pattern is used. await conn.execute( t1.insert(), [{\"name\": \"some", "2\"}] ) async with engine.connect() as conn: # the default result object is", "synchronous IO calls will be transparently translated for # await. await conn.run_sync(meta.drop_all) await", "create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an instance of AsyncConnection async with", "# the results are buffered so no await call is necessary # for", "\"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an instance of AsyncConnection async with engine.begin()", "iteration and awaitable # versions of methods like .all(), fetchmany(), etc. async for", "the # result at time, the AsyncConnection.stream() method is used. # this returns", "MetaData() t1 = Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async", "normal statement execution, a traditional \"await execute()\" # pattern is used. await conn.execute(", "\"some name 2\"}] ) async with engine.connect() as conn: # the default result", "is necessary # for this case. print(result.fetchall()) # for a streaming result that", "async engine created by :func:`_engine.create_async_engine`. We then use it using await within a", "statement execution, a traditional \"await execute()\" # pattern is used. await conn.execute( t1.insert(),", "an async engine created by :func:`_engine.create_async_engine`. 
We then use it using await within", "the AsyncConnection object to any synchronous method, # where synchronous IO calls will", "conn.execute(t1.select()) # the results are buffered so no await call is necessary #", "this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select()) # this object supports", "buffered so no await call is necessary # for this case. print(result.fetchall()) #", "import asyncio from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import", "# this object supports async iteration and awaitable # versions of methods like", "= await conn.stream(t1.select()) # this object supports async iteration and awaitable # versions", "await conn.run_sync(meta.create_all) # for normal statement execution, a traditional \"await execute()\" # pattern", "from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 = Table( \"t1\", meta, Column(\"id\",", "object result = await conn.execute(t1.select()) # the results are buffered so no await", "this case. print(result.fetchall()) # for a streaming result that buffers only segments of", "await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution, a traditional \"await execute()\"", "used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select()) # this", "a \"sync\" # version of the AsyncConnection object to any synchronous method, #", "calls will be transparently translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) #", "no await call is necessary # for this case. print(result.fetchall()) # for a", "created by :func:`_engine.create_async_engine`. We then use it using await within a coroutine. \"\"\"", "call is necessary # for this case. print(result.fetchall()) # for a streaming result", "where synchronous IO calls will be transparently translated for # await. 
await conn.run_sync(meta.drop_all)", "async iteration and awaitable # versions of methods like .all(), fetchmany(), etc. async", "name 2\"}] ) async with engine.connect() as conn: # the default result object", "a coroutine. \"\"\" import asyncio from sqlalchemy import Column from sqlalchemy import Integer", "t1 = Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async def", "well as legacy functions, the # AsyncConnection.run_sync() awaitable method will pass a \"sync\"", "asyncio engine / connection interface. In this example, we have an async engine", "sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy", "the asyncio engine / connection interface. In this example, we have an async", "for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution, a", "object to any synchronous method, # where synchronous IO calls will be transparently", "engine created by :func:`_engine.create_async_engine`. We then use it using await within a coroutine.", "traditional \"await execute()\" # pattern is used. await conn.execute( t1.insert(), [{\"name\": \"some name", "print(result.fetchall()) # for a streaming result that buffers only segments of the #", "it using await within a coroutine. 
\"\"\" import asyncio from sqlalchemy import Column", "await conn.execute(t1.select()) # the results are buffered so no await call is necessary", "a streaming result that buffers only segments of the # result at time,", "sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.ext.asyncio", "as conn: # to support SQLAlchemy DDL methods as well as legacy functions,", "supports async iteration and awaitable # versions of methods like .all(), fetchmany(), etc.", "\"\"\" import asyncio from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy", "from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import MetaData from", "\"some name 1\"}, {\"name\": \"some name 2\"}] ) async with engine.connect() as conn:", "an instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is", "an instance of AsyncConnection async with engine.begin() as conn: # to support SQLAlchemy", "# versions of methods like .all(), fetchmany(), etc. async for row in async_result:", "# AsyncConnection.run_sync() awaitable method will pass a \"sync\" # version of the AsyncConnection", "# sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the results are buffered so", "conn.run_sync(meta.create_all) # for normal statement execution, a traditional \"await execute()\" # pattern is", "returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select()) # this object supports async", "time, the AsyncConnection.stream() method is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. 
async_result", "engine is an instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) #", "of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an instance", "# version of the AsyncConnection object to any synchronous method, # where synchronous", "case. print(result.fetchall()) # for a streaming result that buffers only segments of the", "String from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1", "is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select()) #", "String) ) async def async_main(): # engine is an instance of AsyncEngine engine", "the AsyncConnection.stream() method is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result =", "then use it using await within a coroutine. \"\"\" import asyncio from sqlalchemy", "Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 = Table( \"t1\", meta,", "await. 
await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution, a traditional \"await", "conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution, a traditional \"await execute()\" #", "import Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 = Table( \"t1\",", "will pass a \"sync\" # version of the AsyncConnection object to any synchronous", "AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an instance of", "async with engine.begin() as conn: # to support SQLAlchemy DDL methods as well", "from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Table from", "DDL methods as well as legacy functions, the # AsyncConnection.run_sync() awaitable method will", "# for normal statement execution, a traditional \"await execute()\" # pattern is used.", "sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy", "be transparently translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal", "is used. await conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some name 2\"}]", "= Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async def async_main():", "Integer from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Table", "async def async_main(): # engine is an instance of AsyncEngine engine = create_async_engine(", "with engine.connect() as conn: # the default result object is the # sqlalchemy.engine.Result", "AsyncConnection.run_sync() awaitable method will pass a \"sync\" # version of the AsyncConnection object", "at time, the AsyncConnection.stream() method is used. 
# this returns a sqlalchemy.ext.asyncio.AsyncResult object.", "IO calls will be transparently translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all)", "conn: # the default result object is the # sqlalchemy.engine.Result object result =", "def async_main(): # engine is an instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\",", "segments of the # result at time, the AsyncConnection.stream() method is used. #", "result at time, the AsyncConnection.stream() method is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult", "default result object is the # sqlalchemy.engine.Result object result = await conn.execute(t1.select()) #", "the # AsyncConnection.run_sync() awaitable method will pass a \"sync\" # version of the", "connection interface. In this example, we have an async engine created by :func:`_engine.create_async_engine`.", "1\"}, {\"name\": \"some name 2\"}] ) async with engine.connect() as conn: # the", "async_main(): # engine is an instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True,", ") async def async_main(): # engine is an instance of AsyncEngine engine =", "await conn.stream(t1.select()) # this object supports async iteration and awaitable # versions of", "\"await execute()\" # pattern is used. await conn.execute( t1.insert(), [{\"name\": \"some name 1\"},", "translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution,", "example, we have an async engine created by :func:`_engine.create_async_engine`. We then use it", "legacy functions, the # AsyncConnection.run_sync() awaitable method will pass a \"sync\" # version", "as well as legacy functions, the # AsyncConnection.run_sync() awaitable method will pass a", "sqlalchemy.ext.asyncio.AsyncResult object. 
async_result = await conn.stream(t1.select()) # this object supports async iteration and", "method will pass a \"sync\" # version of the AsyncConnection object to any", "interface. In this example, we have an async engine created by :func:`_engine.create_async_engine`. We", "conn is an instance of AsyncConnection async with engine.begin() as conn: # to", "as legacy functions, the # AsyncConnection.run_sync() awaitable method will pass a \"sync\" #", "of the AsyncConnection object to any synchronous method, # where synchronous IO calls", "await call is necessary # for this case. print(result.fetchall()) # for a streaming", "\"\"\"Illustrates the asyncio engine / connection interface. In this example, we have an", "and awaitable # versions of methods like .all(), fetchmany(), etc. async for row", "# await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution, a traditional", "this example, we have an async engine created by :func:`_engine.create_async_engine`. We then use", "await within a coroutine. \"\"\" import asyncio from sqlalchemy import Column from sqlalchemy", "necessary # for this case. print(result.fetchall()) # for a streaming result that buffers", "we have an async engine created by :func:`_engine.create_async_engine`. We then use it using", "AsyncConnection object to any synchronous method, # where synchronous IO calls will be", "execute()\" # pattern is used. await conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\":", "meta = MetaData() t1 = Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String)", "within a coroutine. \"\"\" import asyncio from sqlalchemy import Column from sqlalchemy import", "that buffers only segments of the # result at time, the AsyncConnection.stream() method", "engine / connection interface. In this example, we have an async engine created", "using await within a coroutine. 
\"\"\" import asyncio from sqlalchemy import Column from", "instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an", "of the # result at time, the AsyncConnection.stream() method is used. # this", "result object is the # sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the", "synchronous method, # where synchronous IO calls will be transparently translated for #", "= await conn.execute(t1.select()) # the results are buffered so no await call is", "to support SQLAlchemy DDL methods as well as legacy functions, the # AsyncConnection.run_sync()", "this object supports async iteration and awaitable # versions of methods like .all(),", "# engine is an instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, )", "sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 = Table(", "asyncio from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import MetaData", "with engine.begin() as conn: # to support SQLAlchemy DDL methods as well as", "# to support SQLAlchemy DDL methods as well as legacy functions, the #", "\"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async def async_main(): # engine", "for a streaming result that buffers only segments of the # result at", "awaitable method will pass a \"sync\" # version of the AsyncConnection object to", "buffers only segments of the # result at time, the AsyncConnection.stream() method is", "= MetaData() t1 = Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) )", "async with engine.connect() as conn: # the default result object is the #", "versions of methods like .all(), fetchmany(), etc. 
async for row in async_result: print(row)", "engine.begin() as conn: # to support SQLAlchemy DDL methods as well as legacy", ":func:`_engine.create_async_engine`. We then use it using await within a coroutine. \"\"\" import asyncio", "to any synchronous method, # where synchronous IO calls will be transparently translated", "AsyncConnection.stream() method is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await", "for normal statement execution, a traditional \"await execute()\" # pattern is used. await", "from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 =", "import Integer from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import", "are buffered so no await call is necessary # for this case. print(result.fetchall())", "Integer, primary_key=True), Column(\"name\", String) ) async def async_main(): # engine is an instance", "instance of AsyncConnection async with engine.begin() as conn: # to support SQLAlchemy DDL", "streaming result that buffers only segments of the # result at time, the", "import String from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData()", "methods as well as legacy functions, the # AsyncConnection.run_sync() awaitable method will pass", "We then use it using await within a coroutine. \"\"\" import asyncio from", "SQLAlchemy DDL methods as well as legacy functions, the # AsyncConnection.run_sync() awaitable method", "In this example, we have an async engine created by :func:`_engine.create_async_engine`. We then", ") async with engine.connect() as conn: # the default result object is the", "# the default result object is the # sqlalchemy.engine.Result object result = await", "a sqlalchemy.ext.asyncio.AsyncResult object. 
async_result = await conn.stream(t1.select()) # this object supports async iteration", "import create_async_engine meta = MetaData() t1 = Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True),", "echo=True, ) # conn is an instance of AsyncConnection async with engine.begin() as", "conn.stream(t1.select()) # this object supports async iteration and awaitable # versions of methods", "# result at time, the AsyncConnection.stream() method is used. # this returns a", "create_async_engine meta = MetaData() t1 = Table( \"t1\", meta, Column(\"id\", Integer, primary_key=True), Column(\"name\",", "sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 = Table( \"t1\", meta, Column(\"id\", Integer,", "conn: # to support SQLAlchemy DDL methods as well as legacy functions, the", "conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some name 2\"}] ) async with", "result that buffers only segments of the # result at time, the AsyncConnection.stream()", "pass a \"sync\" # version of the AsyncConnection object to any synchronous method,", "meta, Column(\"id\", Integer, primary_key=True), Column(\"name\", String) ) async def async_main(): # engine is", "t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some name 2\"}] ) async with engine.connect()", "use it using await within a coroutine. \"\"\" import asyncio from sqlalchemy import", "{\"name\": \"some name 2\"}] ) async with engine.connect() as conn: # the default", "MetaData from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine", "is an instance of AsyncConnection async with engine.begin() as conn: # to support", "result = await conn.execute(t1.select()) # the results are buffered so no await call", "/ connection interface. 
In this example, we have an async engine created by", "sqlalchemy import String from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta =", "is an instance of AsyncEngine engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn", "method, # where synchronous IO calls will be transparently translated for # await.", "# where synchronous IO calls will be transparently translated for # await. await", "only segments of the # result at time, the AsyncConnection.stream() method is used.", "from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta", "# conn is an instance of AsyncConnection async with engine.begin() as conn: #", "async_result = await conn.stream(t1.select()) # this object supports async iteration and awaitable #", "pattern is used. await conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some name", "used. await conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some name 2\"}] )", "primary_key=True), Column(\"name\", String) ) async def async_main(): # engine is an instance of", "is the # sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the results are", "the results are buffered so no await call is necessary # for this", "of methods like .all(), fetchmany(), etc. async for row in async_result: print(row) asyncio.run(async_main())", "Column from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import String", "by :func:`_engine.create_async_engine`. We then use it using await within a coroutine. \"\"\" import", "version of the AsyncConnection object to any synchronous method, # where synchronous IO", "engine.connect() as conn: # the default result object is the # sqlalchemy.engine.Result object", "# for this case. print(result.fetchall()) # for a streaming result that buffers only", "so no await call is necessary # for this case. 
print(result.fetchall()) # for", "# pattern is used. await conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some", "of AsyncConnection async with engine.begin() as conn: # to support SQLAlchemy DDL methods", "method is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select())", "object. async_result = await conn.stream(t1.select()) # this object supports async iteration and awaitable", "import MetaData from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.ext.asyncio import", "functions, the # AsyncConnection.run_sync() awaitable method will pass a \"sync\" # version of", "= create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an instance of AsyncConnection async", ") # conn is an instance of AsyncConnection async with engine.begin() as conn:", "transparently translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement", "[{\"name\": \"some name 1\"}, {\"name\": \"some name 2\"}] ) async with engine.connect() as", "engine = create_async_engine( \"postgresql+asyncpg://scott:tiger@localhost/test\", echo=True, ) # conn is an instance of AsyncConnection", "coroutine. \"\"\" import asyncio from sqlalchemy import Column from sqlalchemy import Integer from", "\"sync\" # version of the AsyncConnection object to any synchronous method, # where", "execution, a traditional \"await execute()\" # pattern is used. await conn.execute( t1.insert(), [{\"name\":", "await conn.execute( t1.insert(), [{\"name\": \"some name 1\"}, {\"name\": \"some name 2\"}] ) async", "support SQLAlchemy DDL methods as well as legacy functions, the # AsyncConnection.run_sync() awaitable", "awaitable # versions of methods like .all(), fetchmany(), etc. 
async for row in", "any synchronous method, # where synchronous IO calls will be transparently translated for", "as conn: # the default result object is the # sqlalchemy.engine.Result object result", "# for a streaming result that buffers only segments of the # result", "name 1\"}, {\"name\": \"some name 2\"}] ) async with engine.connect() as conn: #", "the # sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the results are buffered", "results are buffered so no await call is necessary # for this case.", "object supports async iteration and awaitable # versions of methods like .all(), fetchmany(),", "have an async engine created by :func:`_engine.create_async_engine`. We then use it using await", "will be transparently translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for", "AsyncConnection async with engine.begin() as conn: # to support SQLAlchemy DDL methods as", "for this case. print(result.fetchall()) # for a streaming result that buffers only segments", "import Column from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import", "object is the # sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the results", "Column(\"name\", String) ) async def async_main(): # engine is an instance of AsyncEngine", "from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import String from", "the default result object is the # sqlalchemy.engine.Result object result = await conn.execute(t1.select())" ]
[ "requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for chunk", "\"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\":", "= base_dir with template_file.open() as in_file: self.template = in_file.read() def source_url(self, release_version, package_name,", "package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\"", "f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description = \"Defining the Future of 3D", "\"https://www.zivid.com\" description = \"Defining the Future of 3D Machine Vision\" dependencies = {", "/ \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) )", "(\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self, base_dir:", "request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name", "= requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for", "\"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def", "from pathlib import Path import hashlib import argparse import requests def _sha256sum(url): request", "the Future of 3D Machine Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\",", "Path): 
self.base_dir = base_dir with template_file.open() as in_file: self.template = in_file.read() def source_url(self,", "out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser = argparse.ArgumentParser()", "def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\"", "template_file.open() as in_file: self.template = in_file.read() def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\"", "/ package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as", "__init__(self, base_dir: Path, template_file: Path): self.base_dir = base_dir with template_file.open() as in_file: self.template", "parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version)", "if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description = \"Defining the", "release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description,", "(), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self, base_dir: Path, template_file:", "description = \"Defining the Future of 3D Machine Vision\" dependencies = { \"zivid-telicam-driver\":", "Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\": 
(\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",),", "(\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (),", "package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version,", "print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main():", "= _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\"", "parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version) print(\"Done\")", "chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name):", ") package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]),", "= \"Defining the Future of 3D Machine Vision\" dependencies = { \"zivid-telicam-driver\": (),", "(), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces =", "source_url(self, release_version, package_name, package_version): return 
f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url =", "name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url),", "provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name)", "out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write(", "as in_file: self.template = in_file.read() def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def", "print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if", "as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\")", "main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild", "package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), 
description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\"", "name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description = \"Defining", "requests def _sha256sum(url): request = requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum of", "\"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\":", "flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild:", "out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\")", "for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def", "ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return", "parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild =", "replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (),", "_ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" 
\".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\"", "def __init__(self, base_dir: Path, template_file: Path): self.base_dir = base_dir with template_file.open() as in_file:", "argparse import requests def _sha256sum(url): request = requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating", "return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description =", "flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest()", "argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template))", "name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description = \"Defining the Future", "base_dir: Path, template_file: Path): self.base_dir = base_dir with template_file.open() as in_file: self.template =", "{url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True)", "{ \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), } def", "= \"https://www.zivid.com\" description = \"Defining the Future of 3D Machine Vision\" dependencies =", "(), \"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self, base_dir: Path, template_file: Path): self.base_dir", "} replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\":", "_sha256sum(url): request = requests.get(url, stream=True) 
sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\",", "} def __init__(self, base_dir: Path, template_file: Path): self.base_dir = base_dir with template_file.open() as", ") def write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir /", "\"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\":", "description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self,", "self.base_dir = base_dir with template_file.open() as in_file: self.template = in_file.read() def source_url(self, release_version,", "ubuntu_package_name, package_version) ) def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\")", "sha265sum of {url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk)", "self.source_url( release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"),", "_ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description", "= self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with", "out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with 
out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version)", "hashlib import argparse import requests def _sha256sum(url): request = requests.get(url, stream=True) sha256 =", "= { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",),", "= in_file.read() def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name,", "release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url(", "out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\")", "import hashlib import argparse import requests def _sha256sum(url): request = requests.get(url, stream=True) sha256", "\".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version):", "\"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = {", "return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" 
\".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]),", "\"Defining the Future of 3D Machine Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\":", "in_file: self.template = in_file.read() def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self,", "(\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\":", "package_version) ) def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options", "ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name =", "import Path import hashlib import argparse import requests def _sha256sum(url): request = requests.get(url,", "self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url,", "parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version,", "import argparse import requests def _sha256sum(url): request = requests.get(url, stream=True) sha256 = hashlib.sha256()", "Pkgbuild: 
company_url = \"https://www.zivid.com\" description = \"Defining the Future of 3D Machine Vision\"", "package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir /", "def _sha256sum(url): request = requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\",", "source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir =", "parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version) print(\"Done\") if __name__ == \"__main__\":", "parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version) print(\"Done\") if", "class Pkgbuild: company_url = \"https://www.zivid.com\" description = \"Defining the Future of 3D Machine", "(), } def __init__(self, base_dir: Path, template_file: Path): self.base_dir = base_dir with template_file.open()", "in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return", "\"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self,", "package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file:", "write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name 
out_dir.mkdir(parents=True)", "source_url = self.source_url( release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name,", "\"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self, base_dir: Path, template_file: Path):", "sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\"", "package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name,", "= argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir),", "Machine Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\":", "base_dir with template_file.open() as in_file: self.template = in_file.read() def source_url(self, release_version, package_name, package_version):", "conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version): package_name", "(\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (),", "release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / 
package_name out_dir.mkdir(parents=True) out_file_name", "= parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version) print(\"Done\") if __name__ ==", "\"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def", "package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir", "sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000):", "print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True)", "_ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing", "3D Machine Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",),", "pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), )", "flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else", "Future of 3D Machine Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"),", "= hashlib.sha256() 
print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\",", "with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser =", "parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package,", "def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url", "Path, template_file: Path): self.base_dir = base_dir with template_file.open() as in_file: self.template = in_file.read()", "\"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self, base_dir: Path, template_file: Path): self.base_dir =", "pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version) print(\"Done\") if __name__ == \"__main__\": main()", "out_dir = self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\")", "hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\",", "(\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (),", "\".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version): 
package_name =", ") def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") parser.add_argument(\"--package-version\") options =", "options = parser.parse_args() pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template)) pkgbuild.write(options.release_version, options.package, options.package_version) print(\"Done\") if __name__", "ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url,", "package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format(", "def write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir / package_name", "of {url}\", end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\",", "dependencies = { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\":", "\".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir", "sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name, package_version): package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) out_dir = self.base_dir", "self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser = 
argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\")", "<reponame>zivid/arch-linux-pkgbuild-generator<filename>scripts/generate_pkgbuild.py from pathlib import Path import hashlib import argparse import requests def _sha256sum(url):", "f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version )", "(), \"zivid-genicam\": (), } def __init__(self, base_dir: Path, template_file: Path): self.base_dir = base_dir", "def configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version ) package_name", "dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version, ubuntu_package_name,", "stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True) for chunk in", "= _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\", \"_\"), description=self.description, url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]),", "sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url =", "def main(): parser = argparse.ArgumentParser() parser.add_argument(\"--out-dir\") parser.add_argument(\"--template\") parser.add_argument(\"--release-version\") parser.add_argument(\"--package\") 
parser.add_argument(\"--package-version\") options = parser.parse_args()", "print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class", "company_url = \"https://www.zivid.com\" description = \"Defining the Future of 3D Machine Vision\" dependencies", "end=\"\", flush=True) for chunk in request.iter_content(1000000): print(\".\", end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return", "pathlib import Path import hashlib import argparse import requests def _sha256sum(url): request =", "url=self.company_url, dependencies=\" \".join(self.dependencies[package_name]), conflicts=\" \".join(self.replaces[package_name]), provides=\" \".join(self.replaces[package_name]), source=source_url, sha256sum=_sha256sum(source_url), ) def write(self, release_version,", "else f\"zivid-{name}\" class Pkgbuild: company_url = \"https://www.zivid.com\" description = \"Defining the Future of", "\"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces", "= out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name,", "\"zivid-genicam\": (), } def __init__(self, base_dir: Path, template_file: Path): self.base_dir = base_dir with", "end=\"\", flush=True) sha256.update(chunk) print(\"done\", flush=True) return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\")", "template_file: Path): self.base_dir = base_dir with template_file.open() as in_file: self.template = in_file.read() def", "\"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), } replaces = { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",),", 
"import requests def _sha256sum(url): request = requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum", "of 3D Machine Vision\" dependencies = { \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\":", "= self.source_url( release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name) return self.template.format( name=package_name, pkgver=package_version.replace(\"-\",", "= { \"zivid-telicam-driver\": (\"zivid-telicam-sdk\",), \"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), }", "return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version", "self.template = in_file.read() def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version,", "release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version ) package_name = _ubuntu_package_name_to_arch(ubuntu_package_name)", "Path import hashlib import argparse import requests def _sha256sum(url): request = requests.get(url, stream=True)", "self.base_dir / package_name out_dir.mkdir(parents=True) out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\")", "in_file.read() def source_url(self, release_version, package_name, package_version): return f\"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb\" def configure(self, release_version, ubuntu_package_name, package_version):", 
"return sha256.hexdigest() def _ubuntu_package_name_to_arch(name): return name if name.startswith(\"zivid\") else f\"zivid-{name}\" class Pkgbuild: company_url", "request = requests.get(url, stream=True) sha256 = hashlib.sha256() print(f\"Calculating sha265sum of {url}\", end=\"\", flush=True)", "with template_file.open() as in_file: self.template = in_file.read() def source_url(self, release_version, package_name, package_version): return", "{out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version, ubuntu_package_name, package_version) ) def main(): parser", "\"zivid\": (), \"zivid-studio\": (), \"zivid-tools\": (), \"zivid-genicam\": (), } def __init__(self, base_dir: Path,", "configure(self, release_version, ubuntu_package_name, package_version): source_url = self.source_url( release_version, ubuntu_package_name, package_version ) package_name =", "{ \"zivid-telicam-driver\": (), \"zivid\": (\"zivid-telicam-driver\", \"opencl-driver\"), \"zivid-studio\": (\"zivid\",), \"zivid-tools\": (\"zivid\",), \"zivid-genicam\": (\"zivid\",), }", "out_file_name = out_dir / \"PKGBUILD\" print(f\"Writing {out_file_name}\") with out_file_name.open(\"w\") as out_file: out_file.write( self.configure(release_version," ]
[ "os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should match to the variable name used", "to the variable name used in the tahmoapi. RAIN = \"precipitation\" TEMP =", "ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should match to the variable", "used in the tahmoapi. RAIN = \"precipitation\" TEMP = \"temperature\" REL = \"humidity\"", "Weather variable vocublary. This should match to the variable name used in the", "match to the variable name used in the tahmoapi. RAIN = \"precipitation\" TEMP", "dir import os ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should match", "os ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should match to the", "Root dir import os ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should", "= os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should match to the variable name", "This should match to the variable name used in the tahmoapi. RAIN =", "# Root dir import os ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This", "variable vocublary. This should match to the variable name used in the tahmoapi.", "tahmoapi. RAIN = \"precipitation\" TEMP = \"temperature\" REL = \"humidity\" WINDR= \"winddirection\" SRAD", "name used in the tahmoapi. RAIN = \"precipitation\" TEMP = \"temperature\" REL =", "the variable name used in the tahmoapi. RAIN = \"precipitation\" TEMP = \"temperature\"", "vocublary. This should match to the variable name used in the tahmoapi. RAIN", "variable name used in the tahmoapi. RAIN = \"precipitation\" TEMP = \"temperature\" REL", "should match to the variable name used in the tahmoapi. RAIN = \"precipitation\"", "in the tahmoapi. 
RAIN = \"precipitation\" TEMP = \"temperature\" REL = \"humidity\" WINDR=", "= \"precipitation\" TEMP = \"temperature\" REL = \"humidity\" WINDR= \"winddirection\" SRAD = \"radiation\"", "import os ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) # Weather variable vocublary. This should match to", "# Weather variable vocublary. This should match to the variable name used in", "RAIN = \"precipitation\" TEMP = \"temperature\" REL = \"humidity\" WINDR= \"winddirection\" SRAD =", "the tahmoapi. RAIN = \"precipitation\" TEMP = \"temperature\" REL = \"humidity\" WINDR= \"winddirection\"" ]
[ "('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel( name='FormIndexPage', ),", "molo.core.models import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms',", "[ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations = [", "), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()),", "'0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield', name='page', ),", "on 2019-07-23 19:11 from __future__ import unicode_literals from django.db import migrations import molo.core.blocks", "name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page',", "unicode_literals from django.db import migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks import wagtail.core.fields", "wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel( name='FormIndexPage', ), migrations.DeleteModel( 
name='FormPage', ),", "import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'),", "('core', '0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language',", "('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel( name='FormIndexPage', ), migrations.DeleteModel( name='FormPage',", "1.11.18 on 2019-07-23 19:11 from __future__ import unicode_literals from django.db import migrations import", "migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField(", "migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list',", "('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField',", "wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel( name='FormIndexPage', ), migrations.DeleteModel(", "model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', 
name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')),", "wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())],", "model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()),", "('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations = [ migrations.RemoveField(", "19:11 from __future__ import unicode_literals from django.db import migrations import molo.core.blocks import molo.core.models", "model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage',", "'0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield',", "utf-8 -*- # Generated by Django 1.11.18 on 2019-07-23 19:11 from __future__ import", "wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), 
('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel(", "import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'),", "), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading',", "# -*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2019-07-23 19:11", "# Generated by Django 1.11.18 on 2019-07-23 19:11 from __future__ import unicode_literals from", "model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))),", "migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration):", "migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full", "('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), 
('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True,", "from __future__ import unicode_literals from django.db import migrations import molo.core.blocks import molo.core.models import", "Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ]", "import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'),", "('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html',", "__future__ import unicode_literals from django.db import migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks", "molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()),", "coding: utf-8 -*- # Generated by Django 1.11.18 on 2019-07-23 19:11 from __future__", "-*- # Generated by Django 1.11.18 on 2019-07-23 19:11 from __future__ import unicode_literals", "('paragraph', molo.core.blocks.MarkDownBlock()), ('image', 
wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext',", "-*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2019-07-23 19:11 from", "('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField(", "title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')),", "model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage',", "), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))),", "name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list',", "molo.core.blocks import molo.core.models 
import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies =", "('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ),", "migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image',", "dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations", "('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield', name='page',", "), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ),", "name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages',", "name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph',", "'0006_redirect_increase_max_length'), ('core', 
'0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage',", "= [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'), ] operations =", "class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core', '0017_add_google_search_console'),", "wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore',", "wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects',", "molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel( name='FormIndexPage',", "Generated by Django 1.11.18 on 2019-07-23 19:11 from __future__ import unicode_literals from django.db", "[ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ),", "wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailforms', '0003_capitalizeverbose'), ('wagtailcore', '0040_page_draft_title'), ('wagtailredirects', '0006_redirect_increase_max_length'), ('core',", "wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', 
wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media',", "blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel( name='FormIndexPage', ), migrations.DeleteModel( name='FormPage', ), ]", "wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True),", "from django.db import migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks import wagtail.core.fields import", "= [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr',", "import migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class", "import molo.core.blocks import molo.core.models import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies", "2019-07-23 19:11 from __future__ import unicode_literals from django.db import migrations import molo.core.blocks import", "import unicode_literals from django.db import migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks import", "wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', 
wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ),", "migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField(", "name='language', ), migrations.RemoveField( model_name='formpage', name='page_ptr', ), migrations.RemoveField( model_name='formpage', name='translated_pages', ), migrations.AlterField( model_name='articlepage', name='body',", "import molo.core.models import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [", "Django 1.11.18 on 2019-07-23 19:11 from __future__ import unicode_literals from django.db import migrations", "] operations = [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField(", "django.db import migrations import molo.core.blocks import molo.core.models import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks", "by Django 1.11.18 on 2019-07-23 19:11 from __future__ import unicode_literals from django.db import", "('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True), ), migrations.DeleteModel( name='FormField', ), migrations.DeleteModel(", "field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()),", 
"'0017_add_google_search_console'), ] operations = [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ),", "operations = [ migrations.RemoveField( model_name='formfield', name='page', ), migrations.RemoveField( model_name='formpage', name='language', ), migrations.RemoveField( model_name='formpage'," ]
[ "a model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Authorization':", "KIND, either express or implied. # See the License for the specific language", "versions of a model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept':", "Unless required by applicable law or agreed to in writing, software # distributed", "method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_get_model_version(self):", "import unittest from flask import json from six import BytesIO from apis_server.test import", "\"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test case for create_model Create a model", "content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test", "response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model Update a model \"\"\" inline_object5_serializer =", "'Bearer special-key', } response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response", ": ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve a model", "'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key',", "inline_object6_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept':", "headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get',", "'version_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 
'Authorization': 'Bearer special-key', } response", "special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is", "'version_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open(", "response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model Delete a model \"\"\" query_string =", "six import BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\"", "this file except in compliance with the License. # You may obtain a", "= self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' +", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string)", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case", "test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve a model \"\"\" query_string = [('name', 'name_example')]", "ANY KIND, either express or implied. 
# See the License for the specific", "\"\"\" inline_object6_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers = {", "is : ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a", "query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer", "'/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case", "'/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' +", "headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/get',", "{} headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response", "response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a model version \"\"\" query_string", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer =", "of a model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json',", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json',", "'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers,", "' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version update a model version", "body is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition", "response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version get a model version \"\"\" query_string", "method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version update a model version \"\"\" inline_object6_serializer", "This file is part of Ilyde. 
# # Licensed under the Apache License,", "{ 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/get', method='GET', headers=headers,", "content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test", "delete a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers =", "self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "is : ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions", "OF ANY KIND, either express or implied. # See the License for the", "update_model_version update a model version \"\"\" inline_object6_serializer = {} query_string = [('name', 'name_example'),", "from __future__ import absolute_import import unittest from flask import json from six import", "test_get_model_version(self): \"\"\"Test case for get_model_version get a model version \"\"\" query_string = [('name',", "model version stage \"\"\" inline_object7_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')]", "{ 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers,", "body is : ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model Delete", "'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer),", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test 
case for", ": ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version update a model", "= { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE',", "'Response body is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage", "method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model_version(self):", "' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a model version", "import absolute_import import unittest from flask import json from six import BytesIO from", "\"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Authorization':", "class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test case for create_model Create", "[('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', }", "def test_get_model_version(self): \"\"\"Test case for get_model_version get a model version \"\"\" query_string =", "= { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET',", "data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model(self):", "+ response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version get a model version \"\"\"", "} response = self.client.open( 
'/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body", "= self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is : '", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string)", "model \"\"\" model_serializer = {} headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization':", "'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body", ": ' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version Create a model", "'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer),", "\"\"\"Test case for update_model_version update a model version \"\"\" inline_object6_serializer = {} query_string", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "= { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE',", "stage \"\"\" inline_object7_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers =", "' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage 
Transition model version stage", "= {} headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', }", "'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response,", "self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "case for delete_model Delete a model \"\"\" query_string = [('name', 'name_example')] headers =", "for update_model_version update a model version \"\"\" inline_object6_serializer = {} query_string = [('name',", "special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models', method='POST', headers=headers,", "# # This file is part of Ilyde. 
# # Licensed under the", "'Response body is : ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : '", "for delete_model_version delete a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')]", "{ 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers,", "for get_model_version get a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')]", "\"\"\"Test case for transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer = {} query_string", "response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case", "required by applicable law or agreed to in writing, software # distributed under", "version \"\"\" inline_object6_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers =", "for retrieve_model Retrieve a model \"\"\" query_string = [('name', 'name_example')] headers = {", ": ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition model version", "integration test stubs\"\"\" def 
test_create_model(self): \"\"\"Test case for create_model Create a model \"\"\"", "applicable law or agreed to in writing, software # distributed under the License", "model_serializer = {} headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key',", "' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve a model \"\"\"", "{ 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create',", "is : ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version update a", "case for create_model Create a model \"\"\" model_serializer = {} headers = {", "is part of Ilyde. # # Licensed under the Apache License, Version 2.0", "special-key', } response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is", "flask import json from six import BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase):", "or agreed to in writing, software # distributed under the License is distributed", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json',", "self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : '", "= [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization':", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "def test_update_model_version(self): \"\"\"Test case for update_model_version update a model version \"\"\" inline_object6_serializer =", "response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : '", "import BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def", "'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response,", "def test_update_model(self): \"\"\"Test case for update_model Update a model \"\"\" inline_object5_serializer = {}", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "'/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' +", "writing, software # distributed under the License is distributed on an \"AS IS\"", "= { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/get', method='GET',", "\"\"\" inline_object5_serializer = {} query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json',", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "absolute_import import unittest from flask import json from six import BytesIO from apis_server.test", "License. 
# You may obtain a copy of the License at # #", "' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions of a", "= self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' +", "test_create_model(self): \"\"\"Test case for create_model Create a model \"\"\" model_serializer = {} headers", "\"\"\"Test case for create_model Create a model \"\"\" model_serializer = {} headers =", "get_model_version get a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers", "# coding: utf-8 # # Copyright (c) 2020-2021 Hopenly srl. # # This", "response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is", "for transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer = {} query_string = [('name',", "compliance with the License. 
# You may obtain a copy of the License", "stubs\"\"\" def test_create_model(self): \"\"\"Test case for create_model Create a model \"\"\" model_serializer =", "'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response", "'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string)", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json',", "inline_object7_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept':", "for update_model Update a model \"\"\" inline_object5_serializer = {} query_string = [('name', 'name_example')]", "version stage \"\"\" inline_object7_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers", "= self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' +", "list_model_versions list versions of a model \"\"\" query_string = [('name', 'name_example')] headers =", "('version', 'version_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response =", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "\"\"\" query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key',", "Retrieve a model \"\"\" query_string = [('name', 
'name_example')] headers = { 'Accept': 'application/json',", "} response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is :", "+ response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model Delete a model \"\"\" query_string", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case", "'Response body is : ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model", "limitations under the License. # from __future__ import absolute_import import unittest from flask", "self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is : ' +", "content_type='application/json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case", "response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is", "import json from six import BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController", "# This file is part of Ilyde. 
# # Licensed under the Apache", "transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer = {} query_string = [('name', 'name_example'),", "{ 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers,", "case for delete_model_version delete a model version \"\"\" query_string = [('name', 'name_example'), ('version',", "} response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is :", "not use this file except in compliance with the License. # You may", "delete_model Delete a model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept':", "model version \"\"\" model_version_serializer = {} query_string = [('name', 'name_example')] headers = {", "def test_create_model_version(self): \"\"\"Test case for create_model_version Create a model version \"\"\" model_version_serializer =", "special-key', } response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body", "headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "License, Version 2.0 (the \"License\"); # you may not use this file except", "= self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is :", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', 
query_string=query_string)", "\"\"\"Test case for delete_model Delete a model \"\"\" query_string = [('name', 'name_example')] headers", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) if __name__ == '__main__':", "retrieve_model Retrieve a model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept':", "'/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' +", "query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer", "'/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' +", "'Response body is : ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model", "case for retrieve_model Retrieve a model \"\"\" query_string = [('name', 'name_example')] headers =", "# you may not use this file except in compliance with the License.", "# # Copyright (c) 2020-2021 Hopenly srl. 
# # This file is part", "\"\"\"Test case for update_model Update a model \"\"\" inline_object5_serializer = {} query_string =", "test_update_model_version(self): \"\"\"Test case for update_model_version update a model version \"\"\" inline_object6_serializer = {}", "agreed to in writing, software # distributed under the License is distributed on", "case for get_model_version get a model version \"\"\" query_string = [('name', 'name_example'), ('version',", "(the \"License\"); # you may not use this file except in compliance with", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response", "list versions of a model \"\"\" query_string = [('name', 'name_example')] headers = {", "Transition model version stage \"\"\" inline_object7_serializer = {} query_string = [('name', 'name_example'), ('version',", "# Unless required by applicable law or agreed to in writing, software #", "model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept':", "by applicable law or agreed to in writing, software # distributed under the", "} response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is :", "= self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is :", "} response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "} response = self.client.open( 
'/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body", "'/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers,", "= {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json',", "special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is", ": ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version get a model", "inline_object5_serializer = {} query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type':", "headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list',", "headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test", "of Ilyde. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "for list_model_versions list versions of a model \"\"\" query_string = [('name', 'name_example')] headers", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for", "\"\"\"Test case for list_model_versions list versions of a model \"\"\" query_string = [('name',", "file except in compliance with the License. # You may obtain a copy", "part of Ilyde. 
# # Licensed under the Apache License, Version 2.0 (the", "\"\"\"Test case for retrieve_model Retrieve a model \"\"\" query_string = [('name', 'name_example')] headers", "is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition model", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response,", "for delete_model Delete a model \"\"\" query_string = [('name', 'name_example')] headers = {", "+ response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition model version stage \"\"\"", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response,", "License for the specific language governing permissions and # limitations under the License.", "delete_model_version delete a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers", "Copyright (c) 2020-2021 Hopenly srl. # # This file is part of Ilyde.", "= self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' +", "self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : '", "test_delete_model(self): \"\"\"Test case for delete_model Delete a model \"\"\" query_string = [('name', 'name_example')]", "to in writing, software # distributed under the License is distributed on an", "implied. 
# See the License for the specific language governing permissions and #", "[('name', 'name_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response =", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json',", "import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test case for", "body is : ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model Update", "test stubs\"\"\" def test_create_model(self): \"\"\"Test case for create_model Create a model \"\"\" model_serializer", "} response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is :", "'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer),", "data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model(self):", "and # limitations under the License. # from __future__ import absolute_import import unittest", "or implied. # See the License for the specific language governing permissions and", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response,", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response", "+ response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve a model \"\"\" query_string", "a model \"\"\" model_serializer = {} headers = { 'Accept': 'application/json', 'Content-Type': 'application/json',", "headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response =", "is : ' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version Create a", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "{ 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models',", "data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response", "body is : ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case for list_model_versions list", "body is : ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version update", "{ 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } 
response = self.client.open( '/api/v1/model-versions/update',", "method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_list_model_versions(self):", "headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "\"\"\" model_serializer = {} headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer", "headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test", "= self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is :", "def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a model version \"\"\" query_string =", "create_model Create a model \"\"\" model_serializer = {} headers = { 'Accept': 'application/json',", "'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body", "+ response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model Update a model \"\"\" inline_object5_serializer", "'Bearer special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body", "'Response body is : ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test 
case for list_model_versions", ": ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model Delete a model", "+ response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version Create a model version \"\"\"", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "case for transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer = {} query_string =", "test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a model version \"\"\" query_string = [('name',", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case", "srl. # # This file is part of Ilyde. # # Licensed under", "= { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open(", "Update a model \"\"\" inline_object5_serializer = {} query_string = [('name', 'name_example')] headers =", "'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH',", ": ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a model", "self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : '", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_create_model_version(self): \"\"\"Test case for", "response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is", "(c) 2020-2021 Hopenly srl. # # This file is part of Ilyde. #", "special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is", "a model \"\"\" inline_object5_serializer = {} query_string = [('name', 'name_example')] headers = {", "case for list_model_versions list versions of a model \"\"\" query_string = [('name', 'name_example')]", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for", "use this file except in compliance with the License. # You may obtain", "data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model_version(self):", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response,", "language governing permissions and # limitations under the License. 
# from __future__ import", "'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string)", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "version \"\"\" model_version_serializer = {} query_string = [('name', 'name_example')] headers = { 'Accept':", ": ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions of", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for", "the specific language governing permissions and # limitations under the License. # from", "response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version Create a model version \"\"\" model_version_serializer", "is : ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model Delete a", "2020-2021 Hopenly srl. # # This file is part of Ilyde. # #", "unittest from flask import json from six import BytesIO from apis_server.test import BaseTestCase", "'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body", "headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "2.0 (the \"License\"); # you may not use this file except in compliance", "# Copyright (c) 2020-2021 Hopenly srl. 
# # This file is part of", "' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model Update a model \"\"\"", "__future__ import absolute_import import unittest from flask import json from six import BytesIO", "for the specific language governing permissions and # limitations under the License. #", "\"\"\"Test case for create_model_version Create a model version \"\"\" model_version_serializer = {} query_string", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers,", "TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test case for create_model Create a", "is : ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version get a", "'/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "# # Unless required by applicable law or agreed to in writing, software", "model version \"\"\" inline_object6_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers", "express or implied. 
# See the License for the specific language governing permissions", "'Bearer special-key', } response = self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body", "body is : ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete", "'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST',", "response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : '", ": ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model Update a model", "update a model version \"\"\" inline_object6_serializer = {} query_string = [('name', 'name_example'), ('version',", "\"\"\" inline_object7_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')] headers = {", "either express or implied. 
# See the License for the specific language governing", "= [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key',", "{ 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers,", "{} query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization':", "'name_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open(", "= [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response", "from six import BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test", "= self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' +", "get a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers =", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer),", "apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test case", "test_create_model_version(self): \"\"\"Test case for create_model_version Create a model version \"\"\" model_version_serializer = {}", "response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model 
Retrieve a model \"\"\" query_string =", "the License. # You may obtain a copy of the License at #", "} response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is :", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case", "from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "is : ' + response.data.decode('utf-8')) def test_update_model(self): \"\"\"Test case for update_model Update a", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Ilyde. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', }", "headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test", "body is : ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version get", "# limitations under the License. 
# from __future__ import absolute_import import unittest from", "'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers,", "' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version Create a model version", "\"\"\" model_version_serializer = {} query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json',", "is : ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve a", "test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer = {}", "def test_create_model(self): \"\"\"Test case for create_model Create a model \"\"\" model_serializer = {}", "body is : ' + response.data.decode('utf-8')) def test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve", "Hopenly srl. # # This file is part of Ilyde. # # Licensed", "Delete a model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json',", "'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/delete', method='DELETE', headers=headers, query_string=query_string)", "'/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "License. 
# from __future__ import absolute_import import unittest from flask import json from", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response", "= { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET',", "response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions of a model \"\"\"", "[('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer", "with the License. # You may obtain a copy of the License at", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "governing permissions and # limitations under the License. # from __future__ import absolute_import", "= {} query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json',", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "coding: utf-8 # # Copyright (c) 2020-2021 Hopenly srl. 
# # This file", "special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response", "data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) if __name__", "test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions of a model \"\"\" query_string =", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH',", "under the License. # from __future__ import absolute_import import unittest from flask import", "def test_retrieve_model(self): \"\"\"Test case for retrieve_model Retrieve a model \"\"\" query_string = [('name',", "headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/delete',", "BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self):", "' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version get a model version", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "+ response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions of a model", "+ response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for update_model_version update a model version \"\"\"", "headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) if", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "'Response body is : ' + response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version", "self.client.open( '/api/v1/models/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"\"\"Test case for get_model_version get a model version \"\"\" query_string = [('name', 'name_example'),", "model_version_serializer = {} query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Content-Type':", "content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) if __name__ ==", "method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_retrieve_model(self):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) if __name__ == '__main__': unittest.main()", 
"content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test", "def test_delete_model(self): \"\"\"Test case for delete_model Delete a model \"\"\" query_string = [('name',", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string)", "from flask import json from six import BytesIO from apis_server.test import BaseTestCase class", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for", "method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete',", "for create_model_version Create a model version \"\"\" model_version_serializer = {} query_string = [('name',", "} response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is", "'Accept': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string)", "= self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), 
content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is :", "response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : '", "a model version \"\"\" query_string = [('name', 'name_example'), ('version', 'version_example')] headers = {", "'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer),", "= [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key',", "method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self):", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. 
# You may obtain a copy of", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for", "'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response,", "response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : '", "method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "'name_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response", "' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model Delete a model \"\"\"", "test_update_model(self): \"\"\"Test case for update_model Update a model \"\"\" inline_object5_serializer = {} query_string", "'Response body is : ' + response.data.decode('utf-8')) def test_delete_model(self): \"\"\"Test case for delete_model", "the License. # from __future__ import absolute_import import unittest from flask import json", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "{} query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Content-Type':", "method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "{ 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/transition-stage',", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json')", "'Response body is : ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test case for get_model_version", "\"\"\"Test case for delete_model_version delete a model version \"\"\" query_string = [('name', 'name_example'),", "response = self.client.open( '/api/v1/models', method='POST', headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is :", "self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_update_model_version(self): \"\"\"Test case for", "model \"\"\" query_string = [('name', 'name_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer", "model \"\"\" inline_object5_serializer = {} query_string = [('name', 'name_example')] headers = { 'Accept':", "self.client.open( '/api/v1/model-versions/list', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))", "self.client.open( '/api/v1/model-versions/transition-stage', method='PATCH', headers=headers, data=json.dumps(inline_object7_serializer), 
content_type='application/json', query_string=query_string) self.assert200(response, 'Response body is : '", "'/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "body is : ' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version Create", "headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test", "Create a model version \"\"\" model_version_serializer = {} query_string = [('name', 'name_example')] headers", "specific language governing permissions and # limitations under the License. # from __future__", "a model version \"\"\" inline_object6_serializer = {} query_string = [('name', 'name_example'), ('version', 'version_example')]", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json',", "BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration test stubs\"\"\" def test_create_model(self): \"\"\"Test case for create_model", "'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/create', method='POST', headers=headers, data=json.dumps(model_version_serializer), content_type='application/json', query_string=query_string)", "update_model Update a model \"\"\" inline_object5_serializer = {} query_string = [('name', 'name_example')] headers", "{ 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/update',", "'Bearer special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, 
data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response,", "case for update_model Update a model \"\"\" inline_object5_serializer = {} query_string = [('name',", "special-key', } response = self.client.open( '/api/v1/model-versions/update', method='PATCH', headers=headers, data=json.dumps(inline_object6_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response", "for create_model Create a model \"\"\" model_serializer = {} headers = { 'Accept':", "response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case for transition_model_version_stage Transition model version stage \"\"\" inline_object7_serializer", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_transition_model_version_stage(self): \"\"\"Test case", "headers=headers, data=json.dumps(model_serializer), content_type='application/json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_create_model_version(self):", "'Response body is : ' + response.data.decode('utf-8')) def test_create_model_version(self): \"\"\"Test case for create_model_version", "[('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Authorization': 'Bearer special-key', }", "case for update_model_version update a model version \"\"\" inline_object6_serializer = {} query_string =", "+ response.data.decode('utf-8')) def test_delete_model_version(self): \"\"\"Test case for delete_model_version delete a model version \"\"\"", "query_string = [('name', 'name_example'), ('version', 'version_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json',", "permissions and # limitations under the License. 
# from __future__ import absolute_import import", "('version', 'version_example')] headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', }", "Create a model \"\"\" model_serializer = {} headers = { 'Accept': 'application/json', 'Content-Type':", "case for create_model_version Create a model version \"\"\" model_version_serializer = {} query_string =", "query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_list_model_versions(self): \"\"\"Test case", "file is part of Ilyde. # # Licensed under the Apache License, Version", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models', method='POST',", "'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/models/update', method='PATCH',", "create_model_version Create a model version \"\"\" model_version_serializer = {} query_string = [('name', 'name_example')]", "def test_list_model_versions(self): \"\"\"Test case for list_model_versions list versions of a model \"\"\" query_string", "utf-8 # # Copyright (c) 2020-2021 Hopenly srl. 
# # This file is", "special-key', } response = self.client.open( '/api/v1/model-versions/get', method='GET', headers=headers, query_string=query_string) self.assert200(response, 'Response body is", "'/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def", "headers=headers, query_string=query_string) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_get_model_version(self): \"\"\"Test", "# from __future__ import absolute_import import unittest from flask import json from six", "json from six import BytesIO from apis_server.test import BaseTestCase class TestModelsController(BaseTestCase): \"\"\"ModelsController integration", "a model version \"\"\" model_version_serializer = {} query_string = [('name', 'name_example')] headers =", "'application/json', 'Authorization': 'Bearer special-key', } response = self.client.open( '/api/v1/model-versions/delete', method='DELETE', headers=headers, query_string=query_string) self.assert200(response,", "} response = self.client.open( '/api/v1/models/update', method='PATCH', headers=headers, data=json.dumps(inline_object5_serializer), content_type='application/json', query_string=query_string) self.assert200(response, 'Response body" ]
[ "record state transitions.\"\"\" import json from flask_login import current_user from invenio_rest.errors import RESTException", "FSMException(RESTException): \"\"\"Base Exception for OArepo FSM module, inherit, don't raise.\"\"\" code = 400", "\"\"\"Initialize exception.\"\"\" self.description = ( \"This transition is not permitted \" \"for your", "# oarepo-fsm is free software; you can redistribute it and/or modify it under", "library for record state transitions.\"\"\" import json from flask_login import current_user from invenio_rest.errors", "of record state is attempted.\"\"\" code = 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\"", "# the terms of the MIT License; see LICENSE file for more details.", "request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors()", "exception.\"\"\" self.description = ( \"Direct modification of state is not allowed.\" ) super().__init__(**kwargs)", "when a direct modification of record state is attempted.\"\"\" code = 403 def", "under # the terms of the MIT License; see LICENSE file for more", "# # oarepo-fsm is free software; you can redistribute it and/or modify it", "404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {} is", "__init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {} is not available", "errors if self.code and (self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id)", "\"\"\"OArepo FSM library for record state transitions.\"\"\" import json from flask_login import current_user", "str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is missing.\"\"\" class", "\"Transition {} is not available on this 
record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised", "satisfied for transition.\"\"\" code = 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description", "is free software; you can redistribute it and/or modify it under # the", "is not available to current user.\"\"\" code = 404 def __init__(self, transition=None, **kwargs):", "403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This transition is", "self.get_errors() if self.errors: body[\"errors\"] = errors if self.code and (self.code >= 500) and", "= ( \"This transition is not permitted \" \"for your user {}. Required:", "= dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if self.errors: body[\"errors\"]", "FSM library for record state transitions.\"\"\" import json from flask_login import current_user from", "state is attempted.\"\"\" code = 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description =", "= 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This transition", "self.description = ( \"Transition {} is not available on this record\".format(transition) ) super().__init__(**kwargs)", "direct modification of record state is attempted.\"\"\" code = 403 def __init__(self, **kwargs):", "if self.errors: body[\"errors\"] = errors if self.code and (self.code >= 500) and hasattr(g,", "allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is not available", "Copyright (C) 2020 CESNET. # # oarepo-fsm is free software; you can redistribute", "CESNET. 
# # oarepo-fsm is free software; you can redistribute it and/or modify", "oarepo-fsm is free software; you can redistribute it and/or modify it under #", "can redistribute it and/or modify it under # the terms of the MIT", "Exception for OArepo FSM module, inherit, don't raise.\"\"\" code = 400 @property def", "body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if", "error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if self.errors: body[\"errors\"] = errors if self.code", "requested transition is not available to current user.\"\"\" code = 404 def __init__(self,", "for more details. \"\"\"OArepo FSM library for record state transitions.\"\"\" import json from", "transition is not available to current user.\"\"\" code = 404 def __init__(self, transition=None,", "__init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This transition is not permitted", "of the record is invalid for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize", "parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of record state", "**kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {} is not available on this", "error_class=self.name, ) errors = self.get_errors() if self.errors: body[\"errors\"] = errors if self.code and", "it under # the terms of the MIT License; see LICENSE file for", "InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the record is invalid for transition.\"\"\" def", "super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the record is invalid for", "\"\"\"Raised when source state of the record is invalid for transition.\"\"\" def __init__(self,", "= ( \"Direct modification of state is 
not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException):", "when source state of the record is invalid for transition.\"\"\" def __init__(self, source=None,", "( \"Transition from {} to {} is not allowed\".format(source, target) ) super().__init__(**kwargs) class", "invalid for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = (", "License; see LICENSE file for more details. \"\"\"OArepo FSM library for record state", "free software; you can redistribute it and/or modify it under # the terms", "target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit FSMMixin.\"\"\" def", ") super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is not available to", "not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit", "raise.\"\"\" code = 400 @property def name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def", "(C) 2020 CESNET. # # oarepo-fsm is free software; you can redistribute it", "OArepo FSM module, inherit, don't raise.\"\"\" code = 400 @property def name(self): \"\"\"The", "class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied for transition.\"\"\" code = 403", "when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of", "is not permitted \" \"for your user {}. 
Required: '{}'\".format(current_user, permissions) ) super().__init__(**kwargs)", "def name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request", "400 @property def name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get", "def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from {}", "when permissions are not satisfied for transition.\"\"\" code = 403 def __init__(self, permissions=None,", "class RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs):", "not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{}", "return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body = dict( status=self.code,", "this record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the record", "MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a", "TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is not available to current user.\"\"\" code", "__init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{} must be a subclass", "\"\"\"Raised when the requested transition is not available to current user.\"\"\" code =", "500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised", "flask_login import current_user from invenio_rest.errors import RESTException class FSMException(RESTException): 
\"\"\"Base Exception for OArepo", "body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is", "allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit FSMMixin.\"\"\"", "when record does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description", "InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied for transition.\"\"\" code = 403 def", "403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification of state", "record is invalid for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description", "\"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {} is not available on this record\".format(transition)", "is not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record does not", "\"{} must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when", "you can redistribute it and/or modify it under # the terms of the", "exception.\"\"\" self.description = ( \"Transition from {} to {} is not allowed\".format(source, target)", "details. 
\"\"\"OArepo FSM library for record state transitions.\"\"\" import json from flask_login import", "a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not", "redistribute it and/or modify it under # the terms of the MIT License;", "inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{} must", "body[\"errors\"] = errors if self.code and (self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"]", "FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{} must be", "record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the record is", "exception.\"\"\" self.description = ( \"{} must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs)", "not available to current user.\"\"\" code = 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize", "of the MIT License; see LICENSE file for more details. 
\"\"\"OArepo FSM library", "modification of record state is attempted.\"\"\" code = 403 def __init__(self, **kwargs): \"\"\"Initialize", "DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of record state is attempted.\"\"\" code =", "transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from", "name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request body.\"\"\"", "message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if self.errors: body[\"errors\"] = errors if", "**kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification of state is not allowed.\"", "<filename>oarepo_fsm/errors.py<gh_stars>0 # # Copyright (C) 2020 CESNET. # # oarepo-fsm is free software;", "state is not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition", "= 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {}", "of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied for", "return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException):", "code = 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition", "the request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors =", "{} is not available on this record\".format(transition) ) super().__init__(**kwargs) class 
InvalidSourceStateError(FSMException): \"\"\"Raised when", "transition.\"\"\" code = 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = (", "the MIT License; see LICENSE file for more details. \"\"\"OArepo FSM library for", "for OArepo FSM module, inherit, don't raise.\"\"\" code = 400 @property def name(self):", "exception.\"\"\" self.description = ( \"This transition is not permitted \" \"for your user", "target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from {} to {} is", "dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if self.errors: body[\"errors\"] =", "self.code and (self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body)", "the requested transition is not available to current user.\"\"\" code = 404 def", ") super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the record is invalid", "it and/or modify it under # the terms of the MIT License; see", "from {} to {} is not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised", "when the requested transition is not available to current user.\"\"\" code = 404", "( \"Transition {} is not available on this record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException):", "\"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from {} to {} is not allowed\".format(source,", "transitions.\"\"\" import json from flask_login import current_user from invenio_rest.errors import RESTException class FSMException(RESTException):", ") errors = self.get_errors() if self.errors: body[\"errors\"] = errors if self.code and (self.code", "RESTException class FSMException(RESTException): \"\"\"Base Exception 
for OArepo FSM module, inherit, don't raise.\"\"\" code", "def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {} is not", "class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the record is invalid for transition.\"\"\"", "from flask_login import current_user from invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base Exception for", "file for more details. \"\"\"OArepo FSM library for record state transitions.\"\"\" import json", "code = 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification", "def get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\",", "= 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification of", "is invalid for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description =", "= ( \"Transition {} is not available on this record\".format(transition) ) super().__init__(**kwargs) class", "for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition", "\"\"\"Raised when record does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\"", "is attempted.\"\"\" code = 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = (", "modification of state is not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the", "current user.\"\"\" code = 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description =", "permissions are not satisfied for transition.\"\"\" code = 403 def __init__(self, 
permissions=None, **kwargs):", "**kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This transition is not permitted \" \"for", "transition is not permitted \" \"for your user {}. Required: '{}'\".format(current_user, permissions) )", "class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of record state is attempted.\"\"\" code", "hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required", "= self.get_errors() if self.errors: body[\"errors\"] = errors if self.code and (self.code >= 500)", "class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when", "class FSMException(RESTException): \"\"\"Base Exception for OArepo FSM module, inherit, don't raise.\"\"\" code =", "import current_user from invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base Exception for OArepo FSM", "terms of the MIT License; see LICENSE file for more details. \"\"\"OArepo FSM", ">= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception", "\"\"\"Exception raised when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct", "raised when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification", "the terms of the MIT License; see LICENSE file for more details. 
\"\"\"OArepo", "FSM module, inherit, don't raise.\"\"\" code = 400 @property def name(self): \"\"\"The status", ") super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit FSMMixin.\"\"\" def __init__(self,", "super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is not available to current", "name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body = dict(", "\"\"\"Get the request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors", "= str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is missing.\"\"\"", "import RESTException class FSMException(RESTException): \"\"\"Base Exception for OArepo FSM module, inherit, don't raise.\"\"\"", "self.errors: body[\"errors\"] = errors if self.code and (self.code >= 500) and hasattr(g, \"sentry_event_id\"):", "__init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification of state is not", "self.description = ( \"This transition is not permitted \" \"for your user {}.", "must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions", "state of the record is invalid for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs):", "inherit, don't raise.\"\"\" code = 400 @property def name(self): \"\"\"The status name.\"\"\" return", "\"\"\"Raised when a direct modification of record state is attempted.\"\"\" code = 403", "def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{} must be a", "\"This transition is not permitted \" \"for your user {}. 
Required: '{}'\".format(current_user, permissions)", "available to current user.\"\"\" code = 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\"", "\"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter", "record does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description =", "json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised", "a direct modification of record state is attempted.\"\"\" code = 403 def __init__(self,", "code = 400 @property def name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self,", "status name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body =", "exception.\"\"\" self.description = ( \"Transition {} is not available on this record\".format(transition) )", "modify it under # the terms of the MIT License; see LICENSE file", "status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if self.errors: body[\"errors\"] = errors", "( \"Direct modification of state is not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised", "source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from {} to {}", "be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are", "don't raise.\"\"\" code = 400 @property def name(self): \"\"\"The status name.\"\"\" return type(self).__name__", "record state is attempted.\"\"\" 
code = 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description", "not available on this record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state", "state transitions.\"\"\" import json from flask_login import current_user from invenio_rest.errors import RESTException class", "(self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException):", "super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied for transition.\"\"\" code =", "more details. \"\"\"OArepo FSM library for record state transitions.\"\"\" import json from flask_login", "\"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification of state is not allowed.\" )", "type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ),", "( \"{} must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised", "MIT License; see LICENSE file for more details. 
\"\"\"OArepo FSM library for record", "errors = self.get_errors() if self.errors: body[\"errors\"] = errors if self.code and (self.code >=", "**kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from {} to {} is not", "environ=None): \"\"\"Get the request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, )", "get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name,", "\"Transition from {} to {} is not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException):", "for transition.\"\"\" code = 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description =", "record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{} must be a subclass of", "\"\"\"Base Exception for OArepo FSM module, inherit, don't raise.\"\"\" code = 400 @property", "\"\"\"Initialize exception.\"\"\" self.description = ( \"{} must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) )", "**kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"{} must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls)", "def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This transition is not", "available on this record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of", "if self.code and (self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return", "not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is not", "super().__init__(**kwargs) class 
RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None,", "__init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition from {} to", "invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base Exception for OArepo FSM module, inherit, don't", "module, inherit, don't raise.\"\"\" code = 400 @property def name(self): \"\"\"The status name.\"\"\"", "self.description = ( \"Transition from {} to {} is not allowed\".format(source, target) )", "oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied for transition.\"\"\"", "{} is not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record does", "( \"This transition is not permitted \" \"for your user {}. Required: '{}'\".format(current_user,", "LICENSE file for more details. \"\"\"OArepo FSM library for record state transitions.\"\"\" import", "2020 CESNET. 
# # oarepo-fsm is free software; you can redistribute it and/or", "of state is not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested", "= 400 @property def name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self, environ=None):", "is not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is", "user.\"\"\" code = 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = (", "are not satisfied for transition.\"\"\" code = 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize", ") super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied for transition.\"\"\" code", "on this record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source state of the", "import json from flask_login import current_user from invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base", "not satisfied for transition.\"\"\" code = 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\"", "self.description = ( \"Direct modification of state is not allowed.\" ) super().__init__(**kwargs) class", "source state of the record is invalid for transition.\"\"\" def __init__(self, source=None, target=None,", "software; you can redistribute it and/or modify it under # the terms of", "current_user from invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base Exception for OArepo FSM module,", "and/or modify it under # the terms of the MIT License; see LICENSE", "subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException): \"\"\"Raised when permissions are not satisfied", "{} to {} is 
not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when", "and (self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class", "json from flask_login import current_user from invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base Exception", "required parameter is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of record", "\"Direct modification of state is not allowed.\" ) super().__init__(**kwargs) class TransitionNotAvailableError(FSMException): \"\"\"Raised when", "attempted.\"\"\" code = 403 def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct", "missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of record state is attempted.\"\"\"", "= ( \"Transition from {} to {} is not allowed\".format(source, target) ) super().__init__(**kwargs)", "@property def name(self): \"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the", "is missing.\"\"\" class DirectStateModificationError(FSMException): \"\"\"Raised when a direct modification of record state is", "to current user.\"\"\" code = 404 def __init__(self, transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description", "from invenio_rest.errors import RESTException class FSMException(RESTException): \"\"\"Base Exception for OArepo FSM module, inherit,", "# Copyright (C) 2020 CESNET. # # oarepo-fsm is free software; you can", "see LICENSE file for more details. 
\"\"\"OArepo FSM library for record state transitions.\"\"\"", "\"\"\"The status name.\"\"\" return type(self).__name__ def get_body(self, environ=None): \"\"\"Get the request body.\"\"\" body", "RecordNotStatefulError(FSMException): \"\"\"Raised when record does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize", "permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This transition is not permitted \"", "the record is invalid for transition.\"\"\" def __init__(self, source=None, target=None, **kwargs): \"\"\"Initialize exception.\"\"\"", "= errors if self.code and (self.code >= 500) and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] =", "\"\"\"Raised when permissions are not satisfied for transition.\"\"\" code = 403 def __init__(self,", "to {} is not allowed\".format(source, target) ) super().__init__(**kwargs) class RecordNotStatefulError(FSMException): \"\"\"Raised when record", "body = dict( status=self.code, message=self.get_description(environ), error_module=\"OArepo-FSM\", error_class=self.name, ) errors = self.get_errors() if self.errors:", "self.description = ( \"{} must be a subclass of oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class", "and hasattr(g, \"sentry_event_id\"): body[\"error_id\"] = str(g.sentry_event_id) return json.dumps(body) class MissingRequiredParameterError(FSMException): \"\"\"Exception raised when", "def __init__(self, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Direct modification of state is", "for record state transitions.\"\"\" import json from flask_login import current_user from invenio_rest.errors import", "transition=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"Transition {} is not available on", "class TransitionNotAvailableError(FSMException): \"\"\"Raised when the requested transition is not available to current user.\"\"\"", "= ( \"{} must be a subclass of 
oarepo_fsm.mixins.FSMMixin\".format(record_cls) ) super().__init__(**kwargs) class InvalidPermissionError(FSMException):", "code = 403 def __init__(self, permissions=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = ( \"This", "is not available on this record\".format(transition) ) super().__init__(**kwargs) class InvalidSourceStateError(FSMException): \"\"\"Raised when source", "does not inherit FSMMixin.\"\"\" def __init__(self, record_cls=None, **kwargs): \"\"\"Initialize exception.\"\"\" self.description = (", "# # Copyright (C) 2020 CESNET. # # oarepo-fsm is free software; you" ]
[ "+ file cmd = 'rm ' + path #print cmd os.system(cmd) def parse_opts():", "MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with", "re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd)", "swfs if target == 'windmill': windmill() # Build only the test app we", "= root + '/' + file cmd = 'rm ' + path #print", "For replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH", "def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is", "parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET',", "' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs, file_list in", "all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args = parser.parse_args() return opts,", "args def main(o, a): target = o.target # Build only the AS tests", "target == 'windmill': windmill() # Build only the test app we use to", "= 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with .swf", "only the AS tests into loadable swfs if target == 'windmill': windmill() #", "bootstrap() # Clean out any swfs in the directory elif target == 'clean':", "Build everything, natch elif target == 'all': windmill() bootstrap() # Clean out any", "= parser.parse_args() return opts, args def main(o, a): target = o.target # Build", "with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH + ' -source-path=.", "windmill() # Build only the test app we use to run the 
tests", "'all', 'clean'), default='all') opts, args = parser.parse_args() return opts, args def main(o, a):", "into loadable swfs if target == 'windmill': windmill() # Build only the test", "tests into loadable swfs if target == 'windmill': windmill() # Build only the", "== 'clean': clean() else: print 'Not a valid target.' if __name__ == \"__main__\":", "file.endswith('.swc'): path = root + '/' + file cmd = 'rm ' +", "-o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o", "== 'all': windmill() bootstrap() # Clean out any swfs in the directory elif", "is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args = parser.parse_args() return", "root, dirs, file_list in os.walk('./'): for file in file_list: if file.endswith('.swf') or file.endswith('.swc'):", "dirs, file_list in os.walk('./'): for file in file_list: if file.endswith('.swf') or file.endswith('.swc'): path", "natch elif target == 'all': windmill() bootstrap() # Clean out any swfs in", "-source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs, file_list in os.walk('./'):", "bootstrap(): cmd = MXMLC_PATH + ' -source-path=. 
./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean():", "tests against elif target == 'bootstrap': bootstrap() # Build everything, natch elif target", "run the tests against elif target == 'bootstrap': bootstrap() # Build everything, natch", "Location of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For", "'bootstrap', 'all', 'clean'), default='all') opts, args = parser.parse_args() return opts, args def main(o,", "windmill() bootstrap() # Clean out any swfs in the directory elif target ==", "'clean'), default='all') opts, args = parser.parse_args() return opts, args def main(o, a): target", "metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args = parser.parse_args() return opts, args", "only the test app we use to run the tests against elif target", "choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args = parser.parse_args() return opts, args def", "cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean,", "' -source-path=. 
./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH + '", "if target == 'windmill': windmill() # Build only the test app we use", "import re import shutil # Location of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces", "elif target == 'all': windmill() bootstrap() # Clean out any swfs in the", "'all': windmill() bootstrap() # Clean out any swfs in the directory elif target", "for file in file_list: if file.endswith('.swf') or file.endswith('.swc'): path = root + '/'", "# Location of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' #", "the tests against elif target == 'bootstrap': bootstrap() # Build everything, natch elif", "python import optparse import os import re import shutil # Location of compiler", "target = o.target # Build only the AS tests into loadable swfs if", "-verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$')", "os.system(cmd) def clean(): for root, dirs, file_list in os.walk('./'): for file in file_list:", "def clean(): for root, dirs, file_list in os.walk('./'): for file in file_list: if", "path #print cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build", "parser.parse_args() return opts, args def main(o, a): target = o.target # Build only", "AS tests into loadable swfs if target == 'windmill': windmill() # Build only", "= re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH + ' -source-path=. 
./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf'", "help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts,", "./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf'", "== 'windmill': windmill() # Build only the test app we use to run", "the test app we use to run the tests against elif target ==", "# Clean out any swfs in the directory elif target == 'clean': clean()", "args = parser.parse_args() return opts, args def main(o, a): target = o.target #", "cmd = 'rm ' + path #print cmd os.system(cmd) def parse_opts(): parser =", "'clean': clean() else: print 'Not a valid target.' if __name__ == \"__main__\": main(*parse_opts())", ".as with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH + '", "any swfs in the directory elif target == 'clean': clean() else: print 'Not", "main(o, a): target = o.target # Build only the AS tests into loadable", ".swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/Windmill.as", "directory elif target == 'clean': clean() else: print 'Not a valid target.' if", "return opts, args def main(o, a): target = o.target # Build only the", "def main(o, a): target = o.target # Build only the AS tests into", "+ ' -source-path=. 
./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH +", "os.walk('./'): for file in file_list: if file.endswith('.swf') or file.endswith('.swc'): path = root +", "./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs, file_list in os.walk('./'): for file in", "-compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill():", "opts, args def main(o, a): target = o.target # Build only the AS", "shutil # Location of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings'", "-debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with .swf as_re =", "'--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'),", "loadable swfs if target == 'windmill': windmill() # Build only the test app", "opts, args = parser.parse_args() return opts, args def main(o, a): target = o.target", "parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all',", "parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)',", "# Build only the AS tests into loadable swfs if target == 'windmill':", "app we use to run the tests against elif target == 'bootstrap': bootstrap()", "out any swfs in the directory elif target == 'clean': clean() else: print", "for root, dirs, file_list in os.walk('./'): for file in file_list: if file.endswith('.swf') or", "file_list in os.walk('./'): for file in file_list: if file.endswith('.swf') or file.endswith('.swc'): path =", "cmd = MXMLC_PATH + ' 
-source-path=. ./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd", "if file.endswith('.swf') or file.endswith('.swc'): path = root + '/' + file cmd =", "file cmd = 'rm ' + path #print cmd os.system(cmd) def parse_opts(): parser", "'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with .swf as_re", "default='all') opts, args = parser.parse_args() return opts, args def main(o, a): target =", "+ path #print cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target',", "target == 'clean': clean() else: print 'Not a valid target.' if __name__ ==", "def bootstrap(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def", "= 'rm ' + path #print cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser()", "./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as", "= optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill',", "bootstrap() # Build everything, natch elif target == 'all': windmill() bootstrap() # Clean", "compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as", "the directory elif target == 'clean': clean() else: print 'Not a valid target.'", "-incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$') def", "TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args", "+ ' -source-path=. 
./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs, file_list", "import optparse import os import re import shutil # Location of compiler MXMLC_PATH", "re import shutil # Location of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true", "elif target == 'bootstrap': bootstrap() # Build everything, natch elif target == 'all':", "the AS tests into loadable swfs if target == 'windmill': windmill() # Build", "(windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args =", "Build only the test app we use to run the tests against elif", "# Build everything, natch elif target == 'all': windmill() bootstrap() # Clean out", "elif target == 'clean': clean() else: print 'Not a valid target.' if __name__", "# For replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd =", "windmill(): cmd = MXMLC_PATH + ' -source-path=. 
./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap():", "-compiler.show-actionscript-warnings' # For replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd", "'windmill': windmill() # Build only the test app we use to run the", "everything, natch elif target == 'all': windmill() bootstrap() # Clean out any swfs", "clean(): for root, dirs, file_list in os.walk('./'): for file in file_list: if file.endswith('.swf')", "or file.endswith('.swc'): path = root + '/' + file cmd = 'rm '", "root + '/' + file cmd = 'rm ' + path #print cmd", "= o.target # Build only the AS tests into loadable swfs if target", "Build only the AS tests into loadable swfs if target == 'windmill': windmill()", "'bootstrap': bootstrap() # Build everything, natch elif target == 'all': windmill() bootstrap() #", "optparse import os import re import shutil # Location of compiler MXMLC_PATH =", "of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings' # For replacing", "MXMLC_PATH + ' -source-path=. ./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH", "# Build only the test app we use to run the tests against", "as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH + ' -source-path=. 
./org/windmill/Windmill.as -o", "against elif target == 'bootstrap': bootstrap() # Build everything, natch elif target ==", "path = root + '/' + file cmd = 'rm ' + path", "target == 'bootstrap': bootstrap() # Build everything, natch elif target == 'all': windmill()", "in file_list: if file.endswith('.swf') or file.endswith('.swc'): path = root + '/' + file", "a): target = o.target # Build only the AS tests into loadable swfs", "+ '/' + file cmd = 'rm ' + path #print cmd os.system(cmd)", "'rm ' + path #print cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t',", "replacing .as with .swf as_re = re.compile('\\.as$|\\.mxml$') def windmill(): cmd = MXMLC_PATH +", "' + path #print cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target',", "cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for", "we use to run the tests against elif target == 'bootstrap': bootstrap() #", "<gh_stars>1-10 #!/usr/bin/env python import optparse import os import re import shutil # Location", "o.target # Build only the AS tests into loadable swfs if target ==", "os.system(cmd) def bootstrap(): cmd = MXMLC_PATH + ' -source-path=. 
./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd)", "file_list: if file.endswith('.swf') or file.endswith('.swc'): path = root + '/' + file cmd", "optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap',", "dest='target', help='build TARGET (windmill/bootstrap/all/clean, default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all')", "#!/usr/bin/env python import optparse import os import re import shutil # Location of", "in os.walk('./'): for file in file_list: if file.endswith('.swf') or file.endswith('.swc'): path = root", "MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs,", "Clean out any swfs in the directory elif target == 'clean': clean() else:", "import shutil # Location of compiler MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict", "import os import re import shutil # Location of compiler MXMLC_PATH = 'mxmlc", "'/' + file cmd = 'rm ' + path #print cmd os.system(cmd) def", "in the directory elif target == 'clean': clean() else: print 'Not a valid", "to run the tests against elif target == 'bootstrap': bootstrap() # Build everything,", "-source-path=. ./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd = MXMLC_PATH + ' -source-path=.", "file in file_list: if file.endswith('.swf') or file.endswith('.swc'): path = root + '/' +", "file.endswith('.swf') or file.endswith('.swc'): path = root + '/' + file cmd = 'rm", "use to run the tests against elif target == 'bootstrap': bootstrap() # Build", "target == 'all': windmill() bootstrap() # Clean out any swfs in the directory", "= MXMLC_PATH + ' -source-path=. 
./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def bootstrap(): cmd =", "default is all)', metavar='TARGET', choices=('windmill', 'bootstrap', 'all', 'clean'), default='all') opts, args = parser.parse_args()", "test app we use to run the tests against elif target == 'bootstrap':", "= MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root,", "os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET (windmill/bootstrap/all/clean, default", "os import re import shutil # Location of compiler MXMLC_PATH = 'mxmlc -debug", "def windmill(): cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf' os.system(cmd) def", "swfs in the directory elif target == 'clean': clean() else: print 'Not a", "-o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs, file_list in os.walk('./'): for file", "== 'bootstrap': bootstrap() # Build everything, natch elif target == 'all': windmill() bootstrap()", "#print cmd os.system(cmd) def parse_opts(): parser = optparse.OptionParser() parser.add_option('-t', '--target', dest='target', help='build TARGET", "./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf' os.system(cmd) def clean(): for root, dirs, file_list in os.walk('./'): for" ]
[ "<filename>Mathematics/106bombyx/usage.py #!/usr/bin/python # -*- coding: utf-8 -*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte", "7 16:31:56 2014 <NAME> ## Last update Sun Feb 22 18:32:16 2015 <NAME>", "in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ## Login <<EMAIL>> ## ## Started", "## Made by <NAME> ## Login <<EMAIL>> ## ## Started on Sun Dec", "Dec 7 16:31:56 2014 <NAME> ## Last update Sun Feb 22 18:32:16 2015", "Feb 22 18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx k(integer >= 1 and", "-*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ##", "Sun Dec 7 16:31:56 2014 <NAME> ## Last update Sun Feb 22 18:32:16", "<NAME> ## Login <<EMAIL>> ## ## Started on Sun Dec 7 16:31:56 2014", "<NAME> ## Last update Sun Feb 22 18:32:16 2015 <NAME> ## print \"Usage:", "Started on Sun Dec 7 16:31:56 2014 <NAME> ## Last update Sun Feb", "## Started on Sun Dec 7 16:31:56 2014 <NAME> ## Last update Sun", "Made by <NAME> ## Login <<EMAIL>> ## ## Started on Sun Dec 7", "## ## Made by <NAME> ## Login <<EMAIL>> ## ## Started on Sun", "2014 <NAME> ## Last update Sun Feb 22 18:32:16 2015 <NAME> ## print", "by <NAME> ## Login <<EMAIL>> ## ## Started on Sun Dec 7 16:31:56", "-*- coding: utf-8 -*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made", "Login <<EMAIL>> ## ## Started on Sun Dec 7 16:31:56 2014 <NAME> ##", "Last update Sun Feb 22 18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx k(integer", "Sun Feb 22 18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx k(integer >= 1", "usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ## Login <<EMAIL>> ## ##", "utf-8 -*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME>", "usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ## Login <<EMAIL>>", "22 18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx k(integer >= 1 and <=", "# -*- coding: utf-8 
-*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ##", "/home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ## Login <<EMAIL>> ## ## Started on", "18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx k(integer >= 1 and <= 4).\"", "## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ## Login", "## ## Started on Sun Dec 7 16:31:56 2014 <NAME> ## Last update", "for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by <NAME> ## Login <<EMAIL>> ##", "## Last update Sun Feb 22 18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx", "#!/usr/bin/python # -*- coding: utf-8 -*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ##", "## Login <<EMAIL>> ## ## Started on Sun Dec 7 16:31:56 2014 <NAME>", "<<EMAIL>> ## ## Started on Sun Dec 7 16:31:56 2014 <NAME> ## Last", "update Sun Feb 22 18:32:16 2015 <NAME> ## print \"Usage: ./106bombyx k(integer >=", "on Sun Dec 7 16:31:56 2014 <NAME> ## Last update Sun Feb 22", "16:31:56 2014 <NAME> ## Last update Sun Feb 22 18:32:16 2015 <NAME> ##", "coding: utf-8 -*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by" ]
[ "'Gove': {'id': '093', 'delta': 600, 'frames': 4}, 'Grafton': {'id': '283', 'delta': 600, 'frames':", "%s' % radar_id self._delta = delta or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames']", "outfile=None, logger=None): self._log = logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d'", "self._get_frames() if frames is not None: self._log.debug('Got %s frames for %s at %s',", "4}, 'Dampier': {'id': '153', 'delta': 600, 'frames': 4}, 'Darwin': {'id': '633', 'delta': 360,", "600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493',", "self._log.debug('Getting legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): '''", "= '%03d' % radar_id valids = ', '.join(sorted(RADARS.keys())) if not radar_id and location", "or None def _get_image(self, url): # pylint: disable=no-self-use ''' Fetch an image from", "and distance-from-radar range markings, and merge into a single image. ''' self._log.debug('Getting background", "'773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames': 6}, 'Weipa':", "for %s at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 %", "weather image from the BOM website. Note that get_image() returns None if the", "to produce a frame. Collect and return the frames, ignoring any blanks. 
If", "%s, using %s\", radar_id, delta) if radar_id and not frames: frames = 6", "self._log.debug('Getting frames for %s at %s', self._location, self._t0) bg = self._get_background() legend =", "radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log = logger or logging.getLogger(__name__) if isinstance(radar_id, int):", "%s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str) url = self._get_url(suffix) return", "def _get_loop(self): ''' Return an animated GIF comprising a set of frames, where", "{'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames': 6},", "'Newdegate': {'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames':", "RADARS[location]['id'] self._outfile = outfile self._t0 = 0 self._current = self.current # Public methods", "* n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers] def _get_url(self, path): #", "image is not None: background = PIL.Image.alpha_composite(background, image) return background def _get_frames(self): '''", "''' self._log.debug('Getting frames for %s at %s', self._location, self._t0) bg = self._get_background() legend", "None for layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s for %s at %s',", "frames or None def _get_image(self, url): # pylint: disable=no-self-use ''' Fetch an image", "outdir) try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write", "6}, 'Townsville': {'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600,", "600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583',", "'683', 'delta': 600, 'frames': 4}, 'Bowen': {'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane':", "'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames': 4}, 'Dampier': {'id':", "{'id': '393', 'delta': 
600, 'frames': 4}, 'Hobart': {'id': '763', 'delta': 360, 'frames': 6},", "any blanks. If no frames were produced, return None (the caller must expect", "PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img return None def _get_legend(self): ''' Fetch", "'143', 'delta': 600, 'frames': 4}, 'Namoi': {'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle':", "at %s', self._location, self._t0) loop = io.BytesIO() frames = self._get_frames() if frames is", "using %s\", radar_id, delta) if radar_id and not frames: frames = 6 self._log.error(\"No", "600, 'frames': 4}, 'Mackay': {'id': '223', 'delta': 600, 'frames': 4}, 'Marburg': {'id': '503',", "'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames': 4}, 'Namoi': {'id': '693', 'delta':", "'MtGambier': {'id': '143', 'delta': 600, 'frames': 4}, 'Namoi': {'id': '693', 'delta': 600, 'frames':", "no frames were produced, return None (the caller must expect this). ''' self._log.debug('Getting", "'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6}, 'Katherine': {'id':", "for path %s', path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): ''' Return", "= self._get_url(suffix1) image = self._get_image(url1) if image is not None: background = PIL.Image.alpha_composite(background,", "''' self._log.debug('Getting legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self):", "radar ID %s, using %s\", radar_id, delta) if radar_id and not frames: frames", "{'id': '023', 'delta': 360, 'frames': 6}, 'Mildura': {'id': '303', 'delta': 600, 'frames': 4},", "'Warrego': {'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames':", "600, 'frames': 4}, 'Darwin': {'id': '633', 'delta': 360, 'frames': 6}, 'Emerald': {'id': '723',", "% (self._radar_id, layer) url1 = self._get_url(suffix1) image = self._get_image(url1) 
if image is not", "'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4}, 'Hobart': {'id':", "if the image could not be fetched, so the caller must deal with", "360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623',", "at %s', layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 =", "frames, where each frame includes a background, one or more supplemental layers, a", "self._get_url(suffix1) image = self._get_image(url1) if image is not None: background = PIL.Image.alpha_composite(background, image)", "frames for %s at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile:", "{'id': '333', 'delta': 600, 'frames': 4}, 'Dampier': {'id': '153', 'delta': 600, 'frames': 4},", "self._log.debug('Getting %s for %s at %s', layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' %", "'delta': 600, 'frames': 4}, 'Moree': {'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id':", "path %s', path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): ''' Return a", "'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta':", "using %s\", radar_id, frames) self._location = location or 'ID %s' % radar_id self._delta", "background, one or more supplemental layers, a colorbar legend, and a radar image.", "360, 'frames': 6}, 'Albany': {'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253',", "specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using ID %s (valid locations", "colorbar legend image. 
''' self._log.debug('Getting legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return", "duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames for %s at %s', self._location,", "using ID %s (valid locations are: %s)\", radar_id, valids) if radar_id and not", "location not in RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid", "a single image. ''' self._log.debug('Getting background for %s at %s', self._location, self._t0) suffix0", "- (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers] def _get_url(self,", "'Moree': {'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames':", "''' self._log.debug('Getting radar imagery for %s at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png'", "> self._t0: self._t0 = t1 self._current = self._get_loop() return self._current # Private methods", "self._get_image(url0) if background is None: return None for layer in ('topography', 'locations', 'range'):", "it with a common background image, then overlay on the legend to produce", "save_all=True) else: self._log.warning('Got NO frames for %s at %s', self._location, self._t0) PIL.Image.new('RGB', (512,", "600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683',", "4}, 'Warrego': {'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360,", "4}, 'Moree': {'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600,", "'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id':", "path def _get_wximg(self, time_str): ''' Return a radar weather image from the BOM", "''' Return an animated GIF comprising a set of frames, where each frame", "representing YYYYMMDDHHMM times for the most recent set of radar images to be", "'frames': 6}, 'Sydney': {'id': 
'713', 'delta': 360, 'frames': 6}, 'Townsville': {'id': '733', 'delta':", "'293', 'delta': 600, 'frames': 4}, 'Longreach': {'id': '563', 'delta': 600, 'frames': 4}, 'Mackay':", "'frames': 4}, 'Giles': {'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta':", "'Ceduna': {'id': '333', 'delta': 600, 'frames': 4}, 'Dampier': {'id': '153', 'delta': 600, 'frames':", "'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames': 4}, 'Longreach': {'id':", "background is None: return None for layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s", "4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360,", "specified, using ID %s (valid locations are: %s)\", radar_id, valids) if radar_id and", "{'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6},", "'093', 'delta': 600, 'frames': 4}, 'Grafton': {'id': '283', 'delta': 600, 'frames': 4}, 'Gympie':", "legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return", "except OSError: self._log.error('Could not create directory %s', outdir) try: with open(self._outfile, 'wb') as", "None def _get_image(self, url): # pylint: disable=no-self-use ''' Fetch an image from the", "'223', 'delta': 600, 'frames': 4}, 'Marburg': {'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne':", "557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except", "'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames': 6}, 'Woomera': {'id': '273', 'delta':", "a radar weather image from the BOM website. Note that get_image() returns None", "the BOM website. 
Note that get_image() returns None if the image could not", "'073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6}, }", "radar_id: if location in RADARS: radar_id = None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\")", "fg = self._get_wximg(time_str) if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0))", "def _get_wximg(self, time_str): ''' Return a radar weather image from the BOM website.", "{'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames': 4},", "ID %s (valid locations are: %s)\", radar_id, valids) if radar_id and not delta:", "4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600,", "# pylint: disable=no-self-use ''' Fetch an image from the BOM. ''' self._log.debug('Getting image", "radar_id and not frames: frames = 6 self._log.error(\"No 'frames' specified for radar ID", "Note that get_image() returns None if the image could not be fetched, so", "None: return None for layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s for %s", "at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got", "produced, return None (the caller must expect this). 
''' self._log.debug('Getting frames for %s", "600, 'frames': 4}, 'Esperance': {'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063',", "starting at %s', self._t0) frame_numbers = range(self._frames, 0, -1) tz = dt.timezone.utc f", "'703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600, 'frames': 4}, 'SellicksHill':", "'delta': 600, 'frames': 4}, 'Dampier': {'id': '153', 'delta': 600, 'frames': 4}, 'Darwin': {'id':", "try: os.makedirs(outdir) except OSError: self._log.error('Could not create directory %s', outdir) try: with open(self._outfile,", "'053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames': 4}, 'Dampier':", "or more supplemental layers, a colorbar legend, and a radar image. ''' self._log.info('Getting", "%s)\", location, valids) if radar_id: if location in RADARS: radar_id = None self._log.error(\"Valid", "'163', 'delta': 600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine':", "self._t0) loop = io.BytesIO() frames = self._get_frames() if frames is not None: self._log.debug('Got", "600, 'frames': 4}, 'Bowen': {'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663',", "= ', '.join(sorted(RADARS.keys())) if not radar_id and location not in RADARS: location =", "a list of strings representing YYYYMMDDHHMM times for the most recent set of", "not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or None def _get_image(self,", "OSError: self._log.error('Could not create directory %s', outdir) try: with open(self._outfile, 'wb') as outfile:", "{'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames': 6},", "{'id': '683', 'delta': 600, 'frames': 4}, 'Bowen': {'id': '243', 'delta': 600, 'frames': 4},", "360, 'frames': 6}, 'Townsville': {'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553',", "'.join(sorted(RADARS.keys())) if not radar_id and 
location not in RADARS: location = 'Sydney' self._log.error(\"Bad", "if not radar_id and location not in RADARS: location = 'Sydney' self._log.error(\"Bad 'location'", "'frames': 4}, 'Gove': {'id': '093', 'delta': 600, 'frames': 4}, 'Grafton': {'id': '283', 'delta':", "%s\", radar_id, delta) if radar_id and not frames: frames = 6 self._log.error(\"No 'frames'", "self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using ID %s", "(512, 557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir)", "not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not create directory %s', outdir) try:", "images to be used to create the animated GIF. ''' self._log.debug('Getting time strings", "'Learmonth': {'id': '293', 'delta': 600, 'frames': 4}, 'Longreach': {'id': '563', 'delta': 600, 'frames':", "'delta': 600, 'frames': 4}, 'Warrego': {'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id':", "600, 'frames': 4}, 'Longreach': {'id': '563', 'delta': 600, 'frames': 4}, 'Mackay': {'id': '223',", "map, then the topography, locations (e.g. 
city names), and distance-from-radar range markings, and", "== 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img return None", "'delta': 360, 'frames': 6}, 'Perth': {'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id':", "6}, 'Emerald': {'id': '723', 'delta': 600, 'frames': 4}, 'Esperance': {'id': '323', 'delta': 600,", "if bg and legend: for time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if fg", "frames for %s at %s', self._location, self._t0) bg = self._get_background() legend = self._get_legend()", "'Brisbane': {'id': '663', 'delta': 360, 'frames': 6}, 'Broome': {'id': '173', 'delta': 600, 'frames':", "radar_id or RADARS[location]['id'] self._outfile = outfile self._t0 = 0 self._current = self.current #", "'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta':", "(valid locations are: %s)\", location, valids) if radar_id: if location in RADARS: radar_id", "{'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames': 4},", "'083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4}, 'Hobart':", "where each frame includes a background, one or more supplemental layers, a colorbar", "4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6}, 'Sydney': {'id': '713', 'delta': 360,", "frames=None, outfile=None, logger=None): self._log = logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id =", "dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers] def", "frames = self._get_frames() if frames is not None: self._log.debug('Got %s frames for %s", "RADARS = { 'Adelaide': {'id': '643', 'delta': 360, 'frames': 6}, 'Albany': {'id': '313',", "360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4}, 'Hobart': {'id': '763',", "isinstance(radar_id, int): 
radar_id = '%03d' % radar_id valids = ', '.join(sorted(RADARS.keys())) if not", "''' Fetch a radar image for each expected time, composite it with a", "'frames': 4}, 'Marburg': {'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta':", "'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): ''' Return a radar weather image from", "used to create the animated GIF. ''' self._log.debug('Getting time strings starting at %s',", "'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6}, 'Perth': {'id':", "are: %s)\", location, valids) if radar_id: if location in RADARS: radar_id = None", "%s', self._location, self._t0) loop = io.BytesIO() frames = self._get_frames() if frames is not", "{'id': '153', 'delta': 600, 'frames': 4}, 'Darwin': {'id': '633', 'delta': 360, 'frames': 6},", "self._frames = frames or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile = outfile", "create directory %s', outdir) try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError:", "an image from the BOM. 
''' self._log.debug('Getting image %s', url) response = requests.get(url)", "{'id': '233', 'delta': 600, 'frames': 4}, 'Gove': {'id': '093', 'delta': 600, 'frames': 4},", "'043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs':", "is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or None def", "'delta': 600, 'frames': 4}, 'Gympie': {'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id':", "'frames': 6}, 'Katherine': {'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta':", "4}, 'Esperance': {'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600,", "'Longreach': {'id': '563', 'delta': 600, 'frames': 4}, 'Mackay': {'id': '223', 'delta': 600, 'frames':", "4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360,", "the current BOM radar-loop image. ''' now = int(time.time()) t1 = now -", "self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return an animated GIF comprising a set", "could not be fetched, so the caller must deal with that possibility. 
'''", "{'id': '633', 'delta': 360, 'frames': 6}, 'Emerald': {'id': '723', 'delta': 600, 'frames': 4},", "f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for", "'delta': 360, 'frames': 6}, 'Katherine': {'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id':", "'Mackay': {'id': '223', 'delta': 600, 'frames': 4}, 'Marburg': {'id': '503', 'delta': 600, 'frames':", "an animated GIF comprising a set of frames, where each frame includes a", "'frames': 4}, 'Darwin': {'id': '633', 'delta': 360, 'frames': 6}, 'Emerald': {'id': '723', 'delta':", "__init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log = logger or logging.getLogger(__name__) if", "or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile = outfile self._t0 = 0", "def _get_frames(self): ''' Fetch a radar image for each expected time, composite it", "600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames': 4}, 'Dampier': {'id': '153',", "{'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames': 6},", "logging import os import time import PIL.Image import requests RADARS = { 'Adelaide':", "RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid locations are: %s)\",", "n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers] def _get_url(self, path): # pylint:", "4}, 'Darwin': {'id': '633', 'delta': 360, 'frames': 6}, 'Emerald': {'id': '723', 'delta': 600,", "in self._get_time_strs(): fg = self._get_wximg(time_str) if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg),", "360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523',", "'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id':", "6}, 'PortHedland': {'id': '163', 'delta': 
600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600,", "'delta': 360, 'frames': 6}, 'Albany': {'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id':", "the BOM. ''' self._log.debug('Getting image %s', url) response = requests.get(url) if response.status_code ==", "radar_id = '%03d' % radar_id valids = ', '.join(sorted(RADARS.keys())) if not radar_id and", "{'id': '163', 'delta': 600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4},", "BOM colorbar legend image. ''' self._log.debug('Getting legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png')", "'%s' (valid locations are: %s)\", location, valids) if radar_id: if location in RADARS:", "and not frames: frames = 6 self._log.error(\"No 'frames' specified for radar ID %s,", "suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if background", "delta or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id']", "and return the frames, ignoring any blanks. If no frames were produced, return", "'753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames': 4}, 'Namoi':", "360, 'frames': 6}, 'Weipa': {'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413',", "markings, and merge into a single image. ''' self._log.debug('Getting background for %s at", "self._log.error(\"No 'frames' specified for radar ID %s, using %s\", radar_id, frames) self._location =", "'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames': 4}, 'Gove': {'id':", "'delta': 600, 'frames': 4}, 'Grafton': {'id': '283', 'delta': 600, 'frames': 4}, 'Gympie': {'id':", "Return the current BOM radar-loop image. 
''' now = int(time.time()) t1 = now", "len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames", "= os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not create directory", "= image.convert('RGBA') image.close() return rgba_img return None def _get_legend(self): ''' Fetch the BOM", "'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4}, 'Warrego': {'id': '673', 'delta': 600, 'frames':", "so the caller must deal with that possibility. ''' self._log.debug('Getting radar imagery for", "Return a radar weather image from the BOM website. Note that get_image() returns", "4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4}, 'Bowen': {'id': '243', 'delta': 600,", "PIL.Image.alpha_composite(background, image) return background def _get_frames(self): ''' Fetch a radar image for each", "'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames': 4}, 'Namoi': {'id':", "legend, and a radar image. 
''' self._log.info('Getting loop for %s at %s', self._location,", "% self._delta) if t1 > self._t0: self._t0 = t1 self._current = self._get_loop() return", "360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6}, 'Katherine': {'id': '423',", "4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360,", "in RADARS: radar_id = None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad", "'location' specified, using ID %s (valid locations are: %s)\", radar_id, valids) if radar_id", "600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383',", "self._outfile = outfile self._t0 = 0 self._current = self.current # Public methods @property", "'583', 'delta': 360, 'frames': 6}, 'Sydney': {'id': '713', 'delta': 360, 'frames': 6}, 'Townsville':", "single image. ''' self._log.debug('Getting background for %s at %s', self._location, self._t0) suffix0 =", "def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL for path %s', path) return", "'173', 'delta': 600, 'frames': 4}, 'Cairns': {'id': '193', 'delta': 360, 'frames': 6}, 'Canberra':", "radar_id and not delta: delta = 360 self._log.error(\"No 'delta' specified for radar ID", "ID %s, using %s\", radar_id, delta) if radar_id and not frames: frames =", "_get_legend(self): ''' Fetch the BOM colorbar legend image. ''' self._log.debug('Getting legend at %s',", "be used to create the animated GIF. ''' self._log.debug('Getting time strings starting at", "from the BOM website. Note that get_image() returns None if the image could", "'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6}, 'Katherine': {'id': '423', 'delta': 360, 'frames':", "possibility. 
''' self._log.debug('Getting radar imagery for %s at %s', self._location, time_str) suffix =", "YYYYMMDDHHMM times for the most recent set of radar images to be used", "'Canberra': {'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames':", "location, valids) if radar_id: if location in RADARS: radar_id = None self._log.error(\"Valid 'location'", "valids) if radar_id: if location in RADARS: radar_id = None self._log.error(\"Valid 'location' specified,", "write image to %s', self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return a list", "frames = [] if bg and legend: for time_str in self._get_time_strs(): fg =", "None def _get_legend(self): ''' Fetch the BOM colorbar legend image. ''' self._log.debug('Getting legend", "'233', 'delta': 600, 'frames': 4}, 'Gove': {'id': '093', 'delta': 600, 'frames': 4}, 'Grafton':", "not None: background = PIL.Image.alpha_composite(background, image) return background def _get_frames(self): ''' Fetch a", "if background is None: return None for layer in ('topography', 'locations', 'range'): self._log.debug('Getting", "most recent set of radar images to be used to create the animated", "'delta': 600, 'frames': 4}, 'Hobart': {'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id':", "6}, 'Woomera': {'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600,", "= radar_id or RADARS[location]['id'] self._outfile = outfile self._t0 = 0 self._current = self.current", "BOM website. Note that get_image() returns None if the image could not be", "'413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames': 6}, 'Woomera':", "self._get_legend() frames = [] if bg and legend: for time_str in self._get_time_strs(): fg", "on the legend to produce a frame. 
Collect and return the frames, ignoring", "PIL.Image import requests RADARS = { 'Adelaide': {'id': '643', 'delta': 360, 'frames': 6},", "'frames': 6}, 'Woomera': {'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta':", "''' Fetch the background map, then the topography, locations (e.g. city names), and", "self._log.debug('Getting image %s', url) response = requests.get(url) if response.status_code == 200: image =", "list of strings representing YYYYMMDDHHMM times for the most recent set of radar", "in frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL for path %s',", "''' self._log.info('Getting loop for %s at %s', self._location, self._t0) loop = io.BytesIO() frames", "radar weather image from the BOM website. Note that get_image() returns None if", "into a single image. ''' self._log.debug('Getting background for %s at %s', self._location, self._t0)", "(self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers] def _get_url(self, path):", "for %s at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir", "Fetch an image from the BOM. ''' self._log.debug('Getting image %s', url) response =", "are: %s)\", radar_id, valids) if radar_id and not delta: delta = 360 self._log.error(\"No", "'323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames': 4}, 'Giles':", "produce a frame. Collect and return the frames, ignoring any blanks. 
If no", "'Namoi': {'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames':", "600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames': 6}, 'Woomera': {'id': '273',", "= self._get_legend() frames = [] if bg and legend: for time_str in self._get_time_strs():", "legend = self._get_legend() frames = [] if bg and legend: for time_str in", "= outfile self._t0 = 0 self._current = self.current # Public methods @property def", "'Darwin': {'id': '633', 'delta': 360, 'frames': 6}, 'Emerald': {'id': '723', 'delta': 600, 'frames':", "the frames, ignoring any blanks. If no frames were produced, return None (the", "''' Fetch the BOM colorbar legend image. ''' self._log.debug('Getting legend at %s', self._t0)", "frames is not None: self._log.debug('Got %s frames for %s at %s', len(frames), self._location,", "self._t0) frame_numbers = range(self._frames, 0, -1) tz = dt.timezone.utc f = lambda n:", "and not delta: delta = 360 self._log.error(\"No 'delta' specified for radar ID %s,", "'delta': 600, 'frames': 4}, 'Mackay': {'id': '223', 'delta': 600, 'frames': 4}, 'Marburg': {'id':", "self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames for", "# Private methods def _get_background(self): ''' Fetch the background map, then the topography,", "'Broome': {'id': '173', 'delta': 600, 'frames': 4}, 'Cairns': {'id': '193', 'delta': 360, 'frames':", "'frames': 4}, 'Grafton': {'id': '283', 'delta': 600, 'frames': 4}, 'Gympie': {'id': '083', 'delta':", "url0 = self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if background is None: return", "'range'): self._log.debug('Getting %s for %s at %s', layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png'", "None: background = PIL.Image.alpha_composite(background, image) return background def _get_frames(self): ''' Fetch a radar", 
"radar_id self._delta = delta or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id =", "6}, 'Sydney': {'id': '713', 'delta': 360, 'frames': 6}, 'Townsville': {'id': '733', 'delta': 600,", "'delta': 600, 'frames': 4}, 'Namoi': {'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id':", "'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6}, 'Sydney': {'id':", "'Adelaide': {'id': '643', 'delta': 360, 'frames': 6}, 'Albany': {'id': '313', 'delta': 600, 'frames':", "- (now % self._delta) if t1 > self._t0: self._t0 = t1 self._current =", "url) response = requests.get(url) if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img =", "'Albany': {'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames':", "4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames': 4}, 'Gove': {'id': '093', 'delta': 600,", "{'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4},", "{'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4},", "self._get_loop() return self._current # Private methods def _get_background(self): ''' Fetch the background map,", "'frames': 6}, 'Broome': {'id': '173', 'delta': 600, 'frames': 4}, 'Cairns': {'id': '193', 'delta':", "{'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames': 6},", "6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6}, 'Katherine': {'id': '423', 'delta': 360,", "supplemental layers, a colorbar legend, and a radar image. 
''' self._log.info('Getting loop for", "for %s at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str) url", "image.convert('RGBA') image.close() return rgba_img return None def _get_legend(self): ''' Fetch the BOM colorbar", "self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not", "if t1 > self._t0: self._t0 = t1 self._current = self._get_loop() return self._current #", "360, 'frames': 6}, 'Canberra': {'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053',", "'643', 'delta': 360, 'frames': 6}, 'Albany': {'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings':", "4}, 'Hobart': {'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360,", "'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames': 6}, 'Woomera': {'id':", "(e.g. city names), and distance-from-radar range markings, and merge into a single image.", "4}, 'Cairns': {'id': '193', 'delta': 360, 'frames': 6}, 'Canberra': {'id': '403', 'delta': 360,", "a colorbar legend, and a radar image. 
''' self._log.info('Getting loop for %s at", "at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return an", "except IOError: self._log.error('Could not write image to %s', self._outfile) return loop.getvalue() def _get_time_strs(self):", "specified, using '%s' (valid locations are: %s)\", location, valids) if radar_id: if location", "'delta': 600, 'frames': 4}, 'Gove': {'id': '093', 'delta': 600, 'frames': 4}, 'Grafton': {'id':", "return frames or None def _get_image(self, url): # pylint: disable=no-self-use ''' Fetch an", "self._current = self._get_loop() return self._current # Private methods def _get_background(self): ''' Fetch the", "_get_frames(self): ''' Fetch a radar image for each expected time, composite it with", "'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id':", "600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143',", "{'id': '663', 'delta': 360, 'frames': 6}, 'Broome': {'id': '173', 'delta': 600, 'frames': 4},", "= logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d' % radar_id valids", "'563', 'delta': 600, 'frames': 4}, 'Mackay': {'id': '223', 'delta': 600, 'frames': 4}, 'Marburg':", "open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image to %s',", "not radar_id and location not in RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified,", "{'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames': 4},", "self._log.error(\"Bad 'location' specified, using '%s' (valid locations are: %s)\", location, valids) if radar_id:", "4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6}, } class BOMRadarLoop: def __init__(self,", "= 0 self._current = self.current # Public methods @property def current(self): ''' Return", 
"''' Fetch an image from the BOM. ''' self._log.debug('Getting image %s', url) response", "layers, a colorbar legend, and a radar image. ''' self._log.info('Getting loop for %s", "'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames':", "'273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga':", "return loop.getvalue() def _get_time_strs(self): ''' Return a list of strings representing YYYYMMDDHHMM times", "deal with that possibility. ''' self._log.debug('Getting radar imagery for %s at %s', self._location,", "'delta': 360, 'frames': 6}, 'Canberra': {'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id':", "600, 'frames': 4}, 'Namoi': {'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043',", "image for each expected time, composite it with a common background image, then", "a radar image for each expected time, composite it with a common background", "import os import time import PIL.Image import requests RADARS = { 'Adelaide': {'id':", "the legend to produce a frame. 
Collect and return the frames, ignoring any", "4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600,", "'frames': 4}, 'Dampier': {'id': '153', 'delta': 600, 'frames': 4}, 'Darwin': {'id': '633', 'delta':", "4}, 'Giles': {'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600,", "'Sydney': {'id': '713', 'delta': 360, 'frames': 6}, 'Townsville': {'id': '733', 'delta': 600, 'frames':", "'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta':", "'503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames': 6}, 'Mildura':", "now = int(time.time()) t1 = now - (now % self._delta) if t1 >", "'Hobart': {'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames':", "'location' specified, using '%s' (valid locations are: %s)\", location, valids) if radar_id: if", "frames or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile = outfile self._t0 =", "legend image. ''' self._log.debug('Getting legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url)", "tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use", "tz = dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M')", "'153', 'delta': 600, 'frames': 4}, 'Darwin': {'id': '633', 'delta': 360, 'frames': 6}, 'Emerald':", "city names), and distance-from-radar range markings, and merge into a single image. 
'''", "if isinstance(radar_id, int): radar_id = '%03d' % radar_id valids = ', '.join(sorted(RADARS.keys())) if", "frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames for %s at", "strings starting at %s', self._t0) frame_numbers = range(self._frames, 0, -1) tz = dt.timezone.utc", "6}, 'Mildura': {'id': '303', 'delta': 600, 'frames': 4}, 'Moree': {'id': '533', 'delta': 600,", "a radar image. ''' self._log.info('Getting loop for %s at %s', self._location, self._t0) loop", "the BOM colorbar legend image. ''' self._log.debug('Getting legend at %s', self._t0) url =", "radar_id, valids) if radar_id and not delta: delta = 360 self._log.error(\"No 'delta' specified", "radar image. ''' self._log.info('Getting loop for %s at %s', self._location, self._t0) loop =", "names), and distance-from-radar range markings, and merge into a single image. ''' self._log.debug('Getting", "360, 'frames': 6}, 'Woomera': {'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073',", "{'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames': 6},", "4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames': 6}, 'Broome': {'id': '173', 'delta': 600,", "'Watheroo': {'id': '793', 'delta': 360, 'frames': 6}, 'Weipa': {'id': '783', 'delta': 360, 'frames':", "= self.current # Public methods @property def current(self): ''' Return the current BOM", "'723', 'delta': 600, 'frames': 4}, 'Esperance': {'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton':", "not write image to %s', self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return a", "''' Return the current BOM radar-loop image. 
''' now = int(time.time()) t1 =", "not frames: frames = 6 self._log.error(\"No 'frames' specified for radar ID %s, using", "= PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img return None def _get_legend(self): '''", "6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360,", "io import logging import os import time import PIL.Image import requests RADARS =", "import requests RADARS = { 'Adelaide': {'id': '643', 'delta': 360, 'frames': 6}, 'Albany':", "frames were produced, return None (the caller must expect this). ''' self._log.debug('Getting frames", "self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames for %s", "If no frames were produced, return None (the caller must expect this). '''", "expect this). ''' self._log.debug('Getting frames for %s at %s', self._location, self._t0) bg =", "self._location, self._t0) bg = self._get_background() legend = self._get_legend() frames = [] if bg", "frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or None def _get_image(self, url): #", "360, 'frames': 6}, } class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None,", "if frames is not None: self._log.debug('Got %s frames for %s at %s', len(frames),", "set of frames, where each frame includes a background, one or more supplemental", "'243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames': 6}, 'Broome':", "{'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames': 4},", "_get_time_strs(self): ''' Return a list of strings representing YYYYMMDDHHMM times for the most", "pylint: disable=no-self-use self._log.debug('Getting URL for path %s', path) return 'http://www.bom.gov.au/%s' % path def", "valids) if radar_id and not 
delta: delta = 360 self._log.error(\"No 'delta' specified for", "600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6}, 'Sydney': {'id': '713',", "frames) self._location = location or 'ID %s' % radar_id self._delta = delta or", "url1 = self._get_url(suffix1) image = self._get_image(url1) if image is not None: background =", "at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background", "600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames': 6}, 'Mildura': {'id': '303',", "return the frames, ignoring any blanks. If no frames were produced, return None", "self._log.error(\"Bad 'location' specified, using ID %s (valid locations are: %s)\", radar_id, valids) if", "''' now = int(time.time()) t1 = now - (now % self._delta) if t1", "'PortHedland': {'id': '163', 'delta': 600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames':", "Fetch the background map, then the topography, locations (e.g. 
city names), and distance-from-radar", "radar image for each expected time, composite it with a common background image,", "fg), (0, 0)) return frames or None def _get_image(self, url): # pylint: disable=no-self-use", "'frames': 6}, 'Albany': {'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta':", "'delta': 360, 'frames': 6}, } class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None,", "a common background image, then overlay on the legend to produce a frame.", "%s', path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): ''' Return a radar", "'Newcastle': {'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames':", "radar ID %s, using %s\", radar_id, frames) self._location = location or 'ID %s'", "{'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4},", "self._delta = delta or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id = radar_id", "# pylint: disable=no-self-use self._log.debug('Getting URL for path %s', path) return 'http://www.bom.gov.au/%s' % path", "composite it with a common background image, then overlay on the legend to", "or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile", "600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6}, } class BOMRadarLoop:", "self._t0) bg = self._get_background() legend = self._get_legend() frames = [] if bg and", "NO frames for %s at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if", "delta) if radar_id and not frames: frames = 6 self._log.error(\"No 'frames' specified for", "must expect this). ''' self._log.debug('Getting frames for %s at %s', self._location, self._t0) bg", "image. 
''' self._log.info('Getting loop for %s at %s', self._location, self._t0) loop = io.BytesIO()", "self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir):", "'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames': 6}, 'Weipa': {'id':", "6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames': 6}, 'Weipa': {'id': '783', 'delta': 360,", "fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or None", "600, 'frames': 4}, 'Grafton': {'id': '283', 'delta': 600, 'frames': 4}, 'Gympie': {'id': '083',", "rgba_img return None def _get_legend(self): ''' Fetch the BOM colorbar legend image. '''", "in RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid locations are:", "4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames': 6}, 'Woomera': {'id': '273', 'delta': 600,", "None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or None def _get_image(self, url):", "= 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if background is", "radar images to be used to create the animated GIF. ''' self._log.debug('Getting time", "'Cairns': {'id': '193', 'delta': 360, 'frames': 6}, 'Canberra': {'id': '403', 'delta': 360, 'frames':", "''' self._log.debug('Getting image %s', url) response = requests.get(url) if response.status_code == 200: image", "caller must deal with that possibility. 
''' self._log.debug('Getting radar imagery for %s at", "360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600, 'frames': 4}, 'SellicksHill': {'id': '463',", "'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames': 4}, 'Gove': {'id': '093', 'delta':", "self._radar_id) background = self._get_image(url0) if background is None: return None for layer in", "url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return an animated GIF comprising", "for time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if fg is not None: frames.append(legend.copy())", "'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid locations are: %s)\", location, valids) if", "response = requests.get(url) if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA')", "time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg,", "frame. Collect and return the frames, ignoring any blanks. 
If no frames were", "'delta': 600, 'frames': 4}, 'Bowen': {'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id':", "each expected time, composite it with a common background image, then overlay on", "'delta': 600, 'frames': 4}, 'Marburg': {'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id':", "'delta': 600, 'frames': 4}, 'Darwin': {'id': '633', 'delta': 360, 'frames': 6}, 'Emerald': {'id':", "{'id': '523', 'delta': 360, 'frames': 6}, 'Perth': {'id': '703', 'delta': 360, 'frames': 6},", "delta = 360 self._log.error(\"No 'delta' specified for radar ID %s, using %s\", radar_id,", "self._t0 = t1 self._current = self._get_loop() return self._current # Private methods def _get_background(self):", "bg and legend: for time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if fg is", "for n in frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL for", "outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not create", "imagery for %s at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str)", "image, then overlay on the legend to produce a frame. Collect and return", "returns None if the image could not be fetched, so the caller must", "'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames': 4}, 'Giles': {'id':", "BOM radar-loop image. 
''' now = int(time.time()) t1 = now - (now %", "{'id': '723', 'delta': 600, 'frames': 4}, 'Esperance': {'id': '323', 'delta': 600, 'frames': 4},", "time import PIL.Image import requests RADARS = { 'Adelaide': {'id': '643', 'delta': 360,", "image = self._get_image(url1) if image is not None: background = PIL.Image.alpha_composite(background, image) return", "600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames': 4}, 'Gove': {'id': '093',", "'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta':", "'delta': 360, 'frames': 6}, 'Broome': {'id': '173', 'delta': 600, 'frames': 4}, 'Cairns': {'id':", "time, composite it with a common background image, then overlay on the legend", "'WillisIs': {'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360, 'frames':", "600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4}, 'Bowen': {'id': '243',", "os.makedirs(outdir) except OSError: self._log.error('Could not create directory %s', outdir) try: with open(self._outfile, 'wb')", "radar imagery for %s at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id,", "int(time.time()) t1 = now - (now % self._delta) if t1 > self._t0: self._t0", "distance-from-radar range markings, and merge into a single image. ''' self._log.debug('Getting background for", "= self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if background is None: return None", "= 6 self._log.error(\"No 'frames' specified for radar ID %s, using %s\", radar_id, frames)", "Private methods def _get_background(self): ''' Fetch the background map, then the topography, locations", "merge into a single image. 
''' self._log.debug('Getting background for %s at %s', self._location,", "%s', self._location, self._t0) bg = self._get_background() legend = self._get_legend() frames = [] if", "(now % self._delta) if t1 > self._t0: self._t0 = t1 self._current = self._get_loop()", "'Katherine': {'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames':", "with a common background image, then overlay on the legend to produce a", "if self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could", "'delta' specified for radar ID %s, using %s\", radar_id, delta) if radar_id and", "to be used to create the animated GIF. ''' self._log.debug('Getting time strings starting", "6}, } class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log", "'483', 'delta': 360, 'frames': 6}, 'Katherine': {'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth':", "image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img return None def _get_legend(self):", "self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if background is None: return None for", "6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600,", "at %s', self._t0) frame_numbers = range(self._frames, 0, -1) tz = dt.timezone.utc f =", "_get_image(self, url): # pylint: disable=no-self-use ''' Fetch an image from the BOM. 
'''", "using '%s' (valid locations are: %s)\", location, valids) if radar_id: if location in", "'Melbourne': {'id': '023', 'delta': 360, 'frames': 6}, 'Mildura': {'id': '303', 'delta': 600, 'frames':", "frames = 6 self._log.error(\"No 'frames' specified for radar ID %s, using %s\", radar_id,", "600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames': 4}, 'Giles': {'id': '443',", "'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames': 4}, 'Giles': {'id': '443', 'delta':", "600, 'frames': 4}, 'Warrego': {'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773',", "6}, 'Weipa': {'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600,", "self._log.error('Could not write image to %s', self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return", "360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333',", "response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img return", "frames for %s at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0,", "%s', outdir) try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not", "= dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return", "os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not create directory %s', outdir) try: with", "self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str) url = self._get_url(suffix) return self._get_image(url)", "{'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4},", "= 360 self._log.error(\"No 'delta' specified for radar ID %s, using %s\", radar_id, delta)", "if radar_id and not frames: 
frames = 6 self._log.error(\"No 'frames' specified for radar", "self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background = self._get_image(url0)", "'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4}, 'Hobart': {'id': '763', 'delta': 360, 'frames':", "'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames':", "specified for radar ID %s, using %s\", radar_id, delta) if radar_id and not", "delta: delta = 360 self._log.error(\"No 'delta' specified for radar ID %s, using %s\",", "in ('topography', 'locations', 'range'): self._log.debug('Getting %s for %s at %s', layer, self._location, self._t0)", "'frames': 4}, 'Namoi': {'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta':", "'MountIsa': {'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames':", "'783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong':", "def _get_background(self): ''' Fetch the background map, then the topography, locations (e.g. city", "image to %s', self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return a list of", "'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames': 6}, 'Broome': {'id':", "@property def current(self): ''' Return the current BOM radar-loop image. 
''' now =", "methods def _get_background(self): ''' Fetch the background map, then the topography, locations (e.g.", "layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1) image", "import PIL.Image import requests RADARS = { 'Adelaide': {'id': '643', 'delta': 360, 'frames':", "a background, one or more supplemental layers, a colorbar legend, and a radar", "= now - (now % self._delta) if t1 > self._t0: self._t0 = t1", "and location not in RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified, using '%s'", "= location or 'ID %s' % radar_id self._delta = delta or RADARS[location]['delta'] self._frames", "self._get_wximg(time_str) if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames", "pylint: disable=no-self-use ''' Fetch an image from the BOM. ''' self._log.debug('Getting image %s',", "4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6}, 'Perth': {'id': '703', 'delta': 360,", "or RADARS[location]['id'] self._outfile = outfile self._t0 = 0 self._current = self.current # Public", "with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image to", "'Esperance': {'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames':", "4}, 'Geraldton': {'id': '063', 'delta': 600, 'frames': 4}, 'Giles': {'id': '443', 'delta': 600,", "'Weipa': {'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames':", "4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360,", "'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if background is None:", "4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360,", 
"'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4}, 'Bowen': {'id': '243', 'delta': 600, 'frames':", "'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames':", "os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not create directory %s',", "600, 'frames': 4}, 'Dampier': {'id': '153', 'delta': 600, 'frames': 4}, 'Darwin': {'id': '633',", "'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6}, } class", "loop for %s at %s', self._location, self._t0) loop = io.BytesIO() frames = self._get_frames()", "'Mildura': {'id': '303', 'delta': 600, 'frames': 4}, 'Moree': {'id': '533', 'delta': 600, 'frames':", "{'id': '033', 'delta': 360, 'frames': 6}, 'Woomera': {'id': '273', 'delta': 600, 'frames': 4},", "(the caller must expect this). ''' self._log.debug('Getting frames for %s at %s', self._location,", "were produced, return None (the caller must expect this). ''' self._log.debug('Getting frames for", "and legend: for time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if fg is not", "= self._get_loop() return self._current # Private methods def _get_background(self): ''' Fetch the background", "a frame. Collect and return the frames, ignoring any blanks. 
If no frames", "IOError: self._log.error('Could not write image to %s', self._outfile) return loop.getvalue() def _get_time_strs(self): '''", "as dt import io import logging import os import time import PIL.Image import", "'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames': 6}, 'Mildura': {'id': '303', 'delta':", "'443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames': 4}, 'Gove':", "{'id': '063', 'delta': 600, 'frames': 4}, 'Giles': {'id': '443', 'delta': 600, 'frames': 4},", "'frames': 6}, 'Perth': {'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta':", "self._log.info('Getting loop for %s at %s', self._location, self._t0) loop = io.BytesIO() frames =", "'383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania':", "image could not be fetched, so the caller must deal with that possibility.", "''' self._log.debug('Getting time strings starting at %s', self._t0) frame_numbers = range(self._frames, 0, -1)", "4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4}, 'Warrego': {'id': '673', 'delta': 600,", "'delta': 360, 'frames': 6}, 'Sydney': {'id': '713', 'delta': 360, 'frames': 6}, 'Townsville': {'id':", "self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1) image =", "format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError:", "to %s', self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return a list of strings", "includes a background, one or more supplemental layers, a colorbar legend, and a", "self._delta) if t1 > self._t0: self._t0 = t1 self._current = self._get_loop() return self._current", "'delta': 360, 'frames': 6}, 'Woomera': {'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id':", "600, 'frames': 4}, 'Cairns': {'id': '193', 'delta': 360, 'frames': 6}, 
'Canberra': {'id': '403',", "range markings, and merge into a single image. ''' self._log.debug('Getting background for %s", "t1 = now - (now % self._delta) if t1 > self._t0: self._t0 =", "(valid locations are: %s)\", radar_id, valids) if radar_id and not delta: delta =", "append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames for %s at %s',", "def current(self): ''' Return the current BOM radar-loop image. ''' now = int(time.time())", "return [f(n) for n in frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting", "4}, 'Namoi': {'id': '693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360,", "%s at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str) url =", "'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600, 'frames': 4}, 'SellicksHill': {'id':", "'673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo':", "{'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4},", "self._log.debug('Getting background for %s at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 =", "that possibility. ''' self._log.debug('Getting radar imagery for %s at %s', self._location, time_str) suffix", "strings representing YYYYMMDDHHMM times for the most recent set of radar images to", "360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames': 4}, 'Longreach': {'id': '563',", "return None def _get_legend(self): ''' Fetch the BOM colorbar legend image. 
''' self._log.debug('Getting", "'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using ID %s (valid", "RADARS: radar_id = None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location'", "Public methods @property def current(self): ''' Return the current BOM radar-loop image. '''", "{'id': '173', 'delta': 600, 'frames': 4}, 'Cairns': {'id': '193', 'delta': 360, 'frames': 6},", "{ 'Adelaide': {'id': '643', 'delta': 360, 'frames': 6}, 'Albany': {'id': '313', 'delta': 600,", "'713', 'delta': 360, 'frames': 6}, 'Townsville': {'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga':", "%s for %s at %s', layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id,", "location or 'ID %s' % radar_id self._delta = delta or RADARS[location]['delta'] self._frames =", "'frames': 4}, 'Mackay': {'id': '223', 'delta': 600, 'frames': 4}, 'Marburg': {'id': '503', 'delta':", "'283', 'delta': 600, 'frames': 4}, 'Gympie': {'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek':", "else: self._log.warning('Got NO frames for %s at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop,", "URL for path %s', path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): '''", "'663', 'delta': 360, 'frames': 6}, 'Broome': {'id': '173', 'delta': 600, 'frames': 4}, 'Cairns':", "{'id': '303', 'delta': 600, 'frames': 4}, 'Moree': {'id': '533', 'delta': 600, 'frames': 4},", "360, 'frames': 6}, 'Emerald': {'id': '723', 'delta': 600, 'frames': 4}, 'Esperance': {'id': '323',", "'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames': 6}, 'Broome': {'id': '173', 'delta':", "4}, 'Grafton': {'id': '283', 'delta': 600, 'frames': 4}, 'Gympie': {'id': '083', 'delta': 360,", "'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6}, 'Perth': {'id': '703', 'delta':", "for %s at %s', layer, 
self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer)", "''' Return a list of strings representing YYYYMMDDHHMM times for the most recent", "is not None: self._log.debug('Got %s frames for %s at %s', len(frames), self._location, self._t0)", "times for the most recent set of radar images to be used to", "6}, 'Perth': {'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600,", "for %s at %s', self._location, self._t0) bg = self._get_background() legend = self._get_legend() frames", "{'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6},", "self._log.debug('Got %s frames for %s at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500,", "background = PIL.Image.alpha_composite(background, image) return background def _get_frames(self): ''' Fetch a radar image", "PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if not os.path.isdir(outdir): try:", "600, 'frames': 4}, 'Gove': {'id': '093', 'delta': 600, 'frames': 4}, 'Grafton': {'id': '283',", "with that possibility. 
''' self._log.debug('Getting radar imagery for %s at %s', self._location, time_str)", "self._log.debug('Getting time strings starting at %s', self._t0) frame_numbers = range(self._frames, 0, -1) tz", "self._location = location or 'ID %s' % radar_id self._delta = delta or RADARS[location]['delta']", "360, 'frames': 6}, 'Sydney': {'id': '713', 'delta': 360, 'frames': 6}, 'Townsville': {'id': '733',", "'793', 'delta': 360, 'frames': 6}, 'Weipa': {'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs':", "'693', 'delta': 600, 'frames': 4}, 'Newcastle': {'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate':", "ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using ID %s (valid locations are:", "'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta':", "frames: frames = 6 self._log.error(\"No 'frames' specified for radar ID %s, using %s\",", "if image is not None: background = PIL.Image.alpha_composite(background, image) return background def _get_frames(self):", "-1) tz = dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n),", "% self._radar_id) background = self._get_image(url0) if background is None: return None for layer", "the topography, locations (e.g. 
city names), and distance-from-radar range markings, and merge into", "'Giles': {'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames':", "'423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames': 4}, 'Longreach':", "logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d' % radar_id valids =", "if radar_id and not delta: delta = 360 self._log.error(\"No 'delta' specified for radar", "= frames or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile = outfile self._t0", "'Emerald': {'id': '723', 'delta': 600, 'frames': 4}, 'Esperance': {'id': '323', 'delta': 600, 'frames':", "'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6}, 'Sydney': {'id': '713', 'delta': 360, 'frames':", "location in RADARS: radar_id = None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location:", "'delta': 360, 'frames': 6}, 'Weipa': {'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id':", "%s at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id)", "self._log.error('Could not create directory %s', outdir) try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue())", "600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4}, 'Warrego': {'id': '673',", "0)) return frames or None def _get_image(self, url): # pylint: disable=no-self-use ''' Fetch", "'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames': 6}, 'Mildura': {'id':", "360, 'frames': 6}, 'Perth': {'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163',", "overlay on the legend to produce a frame. 
Collect and return the frames,", "'delta': 360, 'frames': 6}, 'Mildura': {'id': '303', 'delta': 600, 'frames': 4}, 'Moree': {'id':", "%s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return an animated", "animated GIF. ''' self._log.debug('Getting time strings starting at %s', self._t0) frame_numbers = range(self._frames,", "lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in", "that get_image() returns None if the image could not be fetched, so the", "{'id': '143', 'delta': 600, 'frames': 4}, 'Namoi': {'id': '693', 'delta': 600, 'frames': 4},", "'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6}, 'Sydney': {'id': '713', 'delta':", "t1 > self._t0: self._t0 = t1 self._current = self._get_loop() return self._current # Private", "return background def _get_frames(self): ''' Fetch a radar image for each expected time,", "GIF comprising a set of frames, where each frame includes a background, one", "Fetch a radar image for each expected time, composite it with a common", "return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): ''' Return a radar weather image", "radar_id, delta) if radar_id and not frames: frames = 6 self._log.error(\"No 'frames' specified", "'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta':", "'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id':", "360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames': 6}, 'Weipa': {'id': '783',", "create the animated GIF. ''' self._log.debug('Getting time strings starting at %s', self._t0) frame_numbers", "600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames': 6}, 'Broome': {'id': '173',", "image. 
''' now = int(time.time()) t1 = now - (now % self._delta) if", "= self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return an animated GIF comprising a", "loop.getvalue() def _get_time_strs(self): ''' Return a list of strings representing YYYYMMDDHHMM times for", "requests.get(url) if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return", "4}, 'Marburg': {'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360,", "360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033',", "self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def _get_loop(self): ''' Return an animated GIF", "'Geraldton': {'id': '063', 'delta': 600, 'frames': 4}, 'Giles': {'id': '443', 'delta': 600, 'frames':", "= self._get_image(url1) if image is not None: background = PIL.Image.alpha_composite(background, image) return background", "for radar ID %s, using %s\", radar_id, delta) if radar_id and not frames:", "datetime as dt import io import logging import os import time import PIL.Image", "os import time import PIL.Image import requests RADARS = { 'Adelaide': {'id': '643',", "'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta':", "background = self._get_image(url0) if background is None: return None for layer in ('topography',", "'frames': 6}, } class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None):", "'delta': 360, 'frames': 6}, 'Emerald': {'id': '723', 'delta': 600, 'frames': 4}, 'Esperance': {'id':", "{'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames': 4},", "%s\", radar_id, frames) self._location = location or 'ID %s' % radar_id self._delta =", "'333', 'delta': 600, 
'frames': 4}, 'Dampier': {'id': '153', 'delta': 600, 'frames': 4}, 'Darwin':", "self._current # Private methods def _get_background(self): ''' Fetch the background map, then the", "= [] if bg and legend: for time_str in self._get_time_strs(): fg = self._get_wximg(time_str)", "as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image to %s', self._outfile) return", "{'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames': 4},", "360, 'frames': 6}, 'Katherine': {'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293',", "not None: self._log.debug('Got %s frames for %s at %s', len(frames), self._location, self._t0) frames[0].save(loop,", "'frames' specified for radar ID %s, using %s\", radar_id, frames) self._location = location", "this). ''' self._log.debug('Getting frames for %s at %s', self._location, self._t0) bg = self._get_background()", "{'id': '583', 'delta': 360, 'frames': 6}, 'Sydney': {'id': '713', 'delta': 360, 'frames': 6},", "360, 'frames': 6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames': 4}, 'Namoi': {'id': '693',", "'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta':", "radar_id = None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified,", "'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id':", "None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using ID", "outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image to %s', self._outfile) return loop.getvalue()", "time strings starting at %s', self._t0) frame_numbers = range(self._frames, 0, -1) tz =", "self._log.debug('Getting radar imagery for %s at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' %", "%s', 
url) response = requests.get(url) if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img", "loop=0, save_all=True) else: self._log.warning('Got NO frames for %s at %s', self._location, self._t0) PIL.Image.new('RGB',", "image.close() return rgba_img return None def _get_legend(self): ''' Fetch the BOM colorbar legend", "'533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa':", "if location in RADARS: radar_id = None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif", "self._get_image(url) def _get_loop(self): ''' Return an animated GIF comprising a set of frames,", "location = 'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid locations are: %s)\", location,", "%s at %s', self._location, self._t0) bg = self._get_background() legend = self._get_legend() frames =", "= self._get_background() legend = self._get_legend() frames = [] if bg and legend: for", "'frames': 4}, 'Moree': {'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta':", "topography, locations (e.g. 
city names), and distance-from-radar range markings, and merge into a", "'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id':", "= self._get_wximg(time_str) if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return", "'frames': 4}, 'Esperance': {'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id': '063', 'delta':", "'delta': 600, 'frames': 4}, 'Cairns': {'id': '193', 'delta': 360, 'frames': 6}, 'Canberra': {'id':", "{'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233', 'delta': 600, 'frames': 4},", "delta=None, frames=None, outfile=None, logger=None): self._log = logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id", "if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img", "more supplemental layers, a colorbar legend, and a radar image. ''' self._log.info('Getting loop", "'763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6}, 'Katherine':", "= self._get_image(url0) if background is None: return None for layer in ('topography', 'locations',", "frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or None def _get_image(self, url): # pylint:", "600, 'frames': 4}, 'MorningtonIs': {'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753',", "'frames': 4}, 'Cairns': {'id': '193', 'delta': 360, 'frames': 6}, 'Canberra': {'id': '403', 'delta':", "valids = ', '.join(sorted(RADARS.keys())) if not radar_id and location not in RADARS: location", "background image, then overlay on the legend to produce a frame. 
Collect and", "legend: for time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if fg is not None:", "def _get_image(self, url): # pylint: disable=no-self-use ''' Fetch an image from the BOM.", "for the most recent set of radar images to be used to create", "self._t0 = 0 self._current = self.current # Public methods @property def current(self): '''", "legend to produce a frame. Collect and return the frames, ignoring any blanks.", "'Gladstone': {'id': '233', 'delta': 600, 'frames': 4}, 'Gove': {'id': '093', 'delta': 600, 'frames':", "'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6}, } class BOMRadarLoop: def", "'delta': 600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id':", "600, 'frames': 4}, 'Hobart': {'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483',", "'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4}, 'Hobart': {'id': '763', 'delta':", "= self._get_frames() if frames is not None: self._log.debug('Got %s frames for %s at", "get_image() returns None if the image could not be fetched, so the caller", "self._get_time_strs(): fg = self._get_wximg(time_str) if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0,", "'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames': 6}, 'Weipa': {'id': '783', 'delta':", "200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close() return rgba_img return None def", "6}, 'WillisIs': {'id': '413', 'delta': 600, 'frames': 4}, 'Wollongong': {'id': '033', 'delta': 360,", "'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames': 4}, 'Longreach': {'id': '563', 'delta':", "'Wollongong': {'id': '033', 'delta': 360, 'frames': 6}, 'Woomera': {'id': '273', 'delta': 600, 'frames':", "{'id': '493', 'delta': 360, 'frames': 6}, } class BOMRadarLoop: def __init__(self, location=None, radar_id=None,", "4}, 'Mackay': {'id': 
'223', 'delta': 600, 'frames': 4}, 'Marburg': {'id': '503', 'delta': 600,", "6}, 'Albany': {'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600,", "return rgba_img return None def _get_legend(self): ''' Fetch the BOM colorbar legend image.", "'553', 'delta': 600, 'frames': 4}, 'Warrego': {'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi':", "ID %s, using %s\", radar_id, frames) self._location = location or 'ID %s' %", "image from the BOM website. Note that get_image() returns None if the image", "ignoring any blanks. If no frames were produced, return None (the caller must", "radar-loop image. ''' now = int(time.time()) t1 = now - (now % self._delta)", "self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1) image = self._get_image(url1)", "at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile)", "image) return background def _get_frames(self): ''' Fetch a radar image for each expected", "{'id': '363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames': 6},", "return self._current # Private methods def _get_background(self): ''' Fetch the background map, then", "'253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4}, 'Bowen':", "'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600, 'frames': 4}, 'SellicksHill': {'id': '463', 'delta':", "or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d' % radar_id valids = ',", "{'id': '223', 'delta': 600, 'frames': 4}, 'Marburg': {'id': '503', 'delta': 600, 'frames': 4},", "dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n)", "'frames': 4}, 'Gympie': {'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta':", "{'id': '563', 'delta': 600, 'frames': 4}, 
'Mackay': {'id': '223', 'delta': 600, 'frames': 4},", "'Bowen': {'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360, 'frames':", "locations are: %s)\", radar_id, valids) if radar_id and not delta: delta = 360", "location: self._log.error(\"Bad 'location' specified, using ID %s (valid locations are: %s)\", radar_id, valids)", "methods @property def current(self): ''' Return the current BOM radar-loop image. ''' now", "%s', self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return a list of strings representing", "6}, 'MtGambier': {'id': '143', 'delta': 600, 'frames': 4}, 'Namoi': {'id': '693', 'delta': 600,", "RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile = outfile self._t0 = 0 self._current", "def _get_time_strs(self): ''' Return a list of strings representing YYYYMMDDHHMM times for the", "be fetched, so the caller must deal with that possibility. ''' self._log.debug('Getting radar", "{'id': '293', 'delta': 600, 'frames': 4}, 'Longreach': {'id': '563', 'delta': 600, 'frames': 4},", "'623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6}, 'Perth':", "caller must expect this). ''' self._log.debug('Getting frames for %s at %s', self._location, self._t0)", "'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier': {'id':", "the animated GIF. ''' self._log.debug('Getting time strings starting at %s', self._t0) frame_numbers =", "= lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n", "and merge into a single image. ''' self._log.debug('Getting background for %s at %s',", "time_str): ''' Return a radar weather image from the BOM website. 
Note that", "if radar_id: if location in RADARS: radar_id = None self._log.error(\"Valid 'location' specified, ignoring", "class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log = logger", "%s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background =", "% radar_id valids = ', '.join(sorted(RADARS.keys())) if not radar_id and location not in", "of frames, where each frame includes a background, one or more supplemental layers,", "# Public methods @property def current(self): ''' Return the current BOM radar-loop image.", "now - (now % self._delta) if t1 > self._t0: self._t0 = t1 self._current", "suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1) image = self._get_image(url1) if", "= { 'Adelaide': {'id': '643', 'delta': 360, 'frames': 6}, 'Albany': {'id': '313', 'delta':", "_get_wximg(self, time_str): ''' Return a radar weather image from the BOM website. 
Note", "'frames': 6}, 'Townsville': {'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta':", "range(self._frames, 0, -1) tz = dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta", "import logging import os import time import PIL.Image import requests RADARS = {", "self._get_background() legend = self._get_legend() frames = [] if bg and legend: for time_str", "'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id':", "6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames': 4}, 'Hobart': {'id': '763', 'delta': 360,", "for %s at %s', self._location, self._t0) loop = io.BytesIO() frames = self._get_frames() if", "{'id': '283', 'delta': 600, 'frames': 4}, 'Gympie': {'id': '083', 'delta': 360, 'frames': 6},", "(self._radar_id, layer) url1 = self._get_url(suffix1) image = self._get_image(url1) if image is not None:", "image %s', url) response = requests.get(url) if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content))", "[f(n) for n in frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL", "for layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s for %s at %s', layer,", "the caller must deal with that possibility. ''' self._log.debug('Getting radar imagery for %s", "'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4}, 'Warrego': {'id': '673', 'delta':", "disable=no-self-use self._log.debug('Getting URL for path %s', path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self,", "6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600,", "loop = io.BytesIO() frames = self._get_frames() if frames is not None: self._log.debug('Got %s", "600, 'frames': 4}, 'Gympie': {'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393',", "from the BOM. 
''' self._log.debug('Getting image %s', url) response = requests.get(url) if response.status_code", "%s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if", "0, -1) tz = dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta *", "image. ''' self._log.debug('Getting background for %s at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png'", "'Townsville': {'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames':", "6}, 'Learmonth': {'id': '293', 'delta': 600, 'frames': 4}, 'Longreach': {'id': '563', 'delta': 600,", "'delta': 360, 'frames': 6}, 'Townsville': {'id': '733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id':", "radar_id and location not in RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified, using", "self._t0: self._t0 = t1 self._current = self._get_loop() return self._current # Private methods def", "'%03d' % radar_id valids = ', '.join(sorted(RADARS.keys())) if not radar_id and location not", "'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4}, 'Warrego': {'id':", "'033', 'delta': 360, 'frames': 6}, 'Woomera': {'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham':", "'Warruwi': {'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames':", "= requests.get(url) if response.status_code == 200: image = PIL.Image.open(io.BytesIO(response.content)) rgba_img = image.convert('RGBA') image.close()", "%s', layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1)", "import io import logging import os import time import PIL.Image import requests RADARS", "and a radar image. 
''' self._log.info('Getting loop for %s at %s', self._location, self._t0)", "not create directory %s', outdir) try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except", "= None self._log.error(\"Valid 'location' specified, ignoring 'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using", "{'id': '643', 'delta': 360, 'frames': 6}, 'Albany': {'id': '313', 'delta': 600, 'frames': 4},", "'frames': 6}, 'Mildura': {'id': '303', 'delta': 600, 'frames': 4}, 'Moree': {'id': '533', 'delta':", "one or more supplemental layers, a colorbar legend, and a radar image. '''", "def _get_legend(self): ''' Fetch the BOM colorbar legend image. ''' self._log.debug('Getting legend at", "%s at %s', layer, self._location, self._t0) suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1", "self._log.debug('Getting URL for path %s', path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str):", "'frames': 4}, 'Bowen': {'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta':", "= range(self._frames, 0, -1) tz = dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0 -", "4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames': 6}, 'Mildura': {'id': '303', 'delta': 600,", "'frames': 6}, 'Emerald': {'id': '723', 'delta': 600, 'frames': 4}, 'Esperance': {'id': '323', 'delta':", "BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log = logger or", "{'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6},", "'delta': 600, 'frames': 4}, 'Esperance': {'id': '323', 'delta': 600, 'frames': 4}, 'Geraldton': {'id':", "self._log.warning('Got NO frames for %s at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF')", "'frames': 6}, 'NorfolkIs': {'id': '623', 'delta': 600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta':", "% path 
def _get_wximg(self, time_str): ''' Return a radar weather image from the", "at %s', self._location, self._t0) bg = self._get_background() legend = self._get_legend() frames = []", "not be fetched, so the caller must deal with that possibility. ''' self._log.debug('Getting", "requests RADARS = { 'Adelaide': {'id': '643', 'delta': 360, 'frames': 6}, 'Albany': {'id':", "url): # pylint: disable=no-self-use ''' Fetch an image from the BOM. ''' self._log.debug('Getting", "'frames': 4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames': 4}, 'Dampier': {'id': '153', 'delta':", "('topography', 'locations', 'range'): self._log.debug('Getting %s for %s at %s', layer, self._location, self._t0) suffix1", "360, 'frames': 6}, 'Mildura': {'id': '303', 'delta': 600, 'frames': 4}, 'Moree': {'id': '533',", "animated GIF comprising a set of frames, where each frame includes a background,", "if fg is not None: frames.append(legend.copy()) frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0)) return frames or", "specified for radar ID %s, using %s\", radar_id, frames) self._location = location or", "is not None: background = PIL.Image.alpha_composite(background, image) return background def _get_frames(self): ''' Fetch", "layer) url1 = self._get_url(suffix1) image = self._get_image(url1) if image is not None: background", "try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image", "of strings representing YYYYMMDDHHMM times for the most recent set of radar images", "self._current = self.current # Public methods @property def current(self): ''' Return the current", "% radar_id self._delta = delta or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id", "self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0 % self._radar_id) background = self._get_image(url0) if", "{'id': '093', 'delta': 600, 'frames': 4}, 'Grafton': 
{'id': '283', 'delta': 600, 'frames': 4},", "for radar ID %s, using %s\", radar_id, frames) self._location = location or 'ID", "'Perth': {'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600, 'frames':", "'delta': 600, 'frames': 4}, 'Giles': {'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id':", "{'id': '193', 'delta': 360, 'frames': 6}, 'Canberra': {'id': '403', 'delta': 360, 'frames': 6},", "0 self._current = self.current # Public methods @property def current(self): ''' Return the", "locations (e.g. city names), and distance-from-radar range markings, and merge into a single", "set of radar images to be used to create the animated GIF. '''", "6}, 'Katherine': {'id': '423', 'delta': 360, 'frames': 6}, 'Learmonth': {'id': '293', 'delta': 600,", "= 'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid locations are: %s)\", location, valids)", "path): # pylint: disable=no-self-use self._log.debug('Getting URL for path %s', path) return 'http://www.bom.gov.au/%s' %", "'frames': 4}, 'Warrego': {'id': '673', 'delta': 600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta':", "location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log = logger or logging.getLogger(__name__) if isinstance(radar_id,", "of radar images to be used to create the animated GIF. 
''' self._log.debug('Getting", "= 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1) image = self._get_image(url1) if image", "{'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland': {'id': '163', 'delta': 600, 'frames': 4},", "'633', 'delta': 360, 'frames': 6}, 'Emerald': {'id': '723', 'delta': 600, 'frames': 4}, 'Esperance':", "600, 'frames': 4}, 'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6}, 'Perth': {'id': '703',", "'radar_id'\") elif location: self._log.error(\"Bad 'location' specified, using ID %s (valid locations are: %s)\",", "frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL for path %s', path)", "'303', 'delta': 600, 'frames': 4}, 'Moree': {'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs':", "{'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793', 'delta': 360, 'frames': 6},", "t1 self._current = self._get_loop() return self._current # Private methods def _get_background(self): ''' Fetch", "%s at %s', self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir =", "fetched, so the caller must deal with that possibility. ''' self._log.debug('Getting radar imagery", "_get_background(self): ''' Fetch the background map, then the topography, locations (e.g. 
city names),", "%s, using %s\", radar_id, frames) self._location = location or 'ID %s' % radar_id", "6 self._log.error(\"No 'frames' specified for radar ID %s, using %s\", radar_id, frames) self._location", "'523', 'delta': 360, 'frames': 6}, 'Perth': {'id': '703', 'delta': 360, 'frames': 6}, 'PortHedland':", "'Wyndham': {'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta': 360, 'frames':", "for %s at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True)", "= delta or RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id = radar_id or", "%s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO", "current(self): ''' Return the current BOM radar-loop image. ''' now = int(time.time()) t1", "website. Note that get_image() returns None if the image could not be fetched,", "{'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4},", "elif location: self._log.error(\"Bad 'location' specified, using ID %s (valid locations are: %s)\", radar_id,", "4}, 'Gympie': {'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600,", "then overlay on the legend to produce a frame. Collect and return the", "self._radar_id = radar_id or RADARS[location]['id'] self._outfile = outfile self._t0 = 0 self._current =", "600, 'frames': 4}, 'Marburg': {'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023',", "'393', 'delta': 600, 'frames': 4}, 'Hobart': {'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie':", "_get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL for path %s', path) return 'http://www.bom.gov.au/%s'", "None (the caller must expect this). 
''' self._log.debug('Getting frames for %s at %s',", "4}, 'Bowen': {'id': '243', 'delta': 600, 'frames': 4}, 'Brisbane': {'id': '663', 'delta': 360,", "not in RADARS: location = 'Sydney' self._log.error(\"Bad 'location' specified, using '%s' (valid locations", "self.current # Public methods @property def current(self): ''' Return the current BOM radar-loop", "'NWTasmania': {'id': '523', 'delta': 360, 'frames': 6}, 'Perth': {'id': '703', 'delta': 360, 'frames':", "'Yarrawonga': {'id': '493', 'delta': 360, 'frames': 6}, } class BOMRadarLoop: def __init__(self, location=None,", "background map, then the topography, locations (e.g. city names), and distance-from-radar range markings,", "then the topography, locations (e.g. city names), and distance-from-radar range markings, and merge", "'delta': 600, 'frames': 4}, 'Longreach': {'id': '563', 'delta': 600, 'frames': 4}, 'Mackay': {'id':", "self._get_image(url1) if image is not None: background = PIL.Image.alpha_composite(background, image) return background def", "each frame includes a background, one or more supplemental layers, a colorbar legend,", "at %s', self._location, time_str) suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str) url = self._get_url(suffix)", "'463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6}, 'Sydney':", "'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames':", "int): radar_id = '%03d' % radar_id valids = ', '.join(sorted(RADARS.keys())) if not radar_id", "RADARS[location]['delta'] self._frames = frames or RADARS[location]['frames'] self._radar_id = radar_id or RADARS[location]['id'] self._outfile =", "disable=no-self-use ''' Fetch an image from the BOM. 
''' self._log.debug('Getting image %s', url)", "600, 'frames': 4}, 'Giles': {'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone': {'id': '233',", "{'id': '713', 'delta': 360, 'frames': 6}, 'Townsville': {'id': '733', 'delta': 600, 'frames': 4},", "'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna': {'id': '333', 'delta':", "%s at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True) else:", "'493', 'delta': 360, 'frames': 6}, } class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None,", "'Gympie': {'id': '083', 'delta': 360, 'frames': 6}, 'HallsCreek': {'id': '393', 'delta': 600, 'frames':", "{'id': '043', 'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames': 6},", "'Woomera': {'id': '273', 'delta': 600, 'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames':", "360, 'frames': 6}, 'Broome': {'id': '173', 'delta': 600, 'frames': 4}, 'Cairns': {'id': '193',", "'ID %s' % radar_id self._delta = delta or RADARS[location]['delta'] self._frames = frames or", "if not os.path.isdir(outdir): try: os.makedirs(outdir) except OSError: self._log.error('Could not create directory %s', outdir)", "self._location, self._t0) loop = io.BytesIO() frames = self._get_frames() if frames is not None:", "} class BOMRadarLoop: def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log =", "BOM. ''' self._log.debug('Getting image %s', url) response = requests.get(url) if response.status_code == 200:", "image from the BOM. ''' self._log.debug('Getting image %s', url) response = requests.get(url) if", "'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image to %s', self._outfile)", "Fetch the BOM colorbar legend image. 
''' self._log.debug('Getting legend at %s', self._t0) url", "{'id': '793', 'delta': 360, 'frames': 6}, 'Weipa': {'id': '783', 'delta': 360, 'frames': 6},", "self._location, self._t0) PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF') if self._outfile: outdir = os.path.dirname(self._outfile) if not", "'363', 'delta': 600, 'frames': 4}, 'MountIsa': {'id': '753', 'delta': 360, 'frames': 6}, 'MtGambier':", "return self._get_image(url) def _get_loop(self): ''' Return an animated GIF comprising a set of", "layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s for %s at %s', layer, self._location,", "radar_id valids = ', '.join(sorted(RADARS.keys())) if not radar_id and location not in RADARS:", "self._log = logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d' % radar_id", "for each expected time, composite it with a common background image, then overlay", "[] if bg and legend: for time_str in self._get_time_strs(): fg = self._get_wximg(time_str) if", "'063', 'delta': 600, 'frames': 4}, 'Giles': {'id': '443', 'delta': 600, 'frames': 4}, 'Gladstone':", "= int(time.time()) t1 = now - (now % self._delta) if t1 > self._t0:", "comprising a set of frames, where each frame includes a background, one or", "%s at %s', self._location, self._t0) loop = io.BytesIO() frames = self._get_frames() if frames", "frame_numbers = range(self._frames, 0, -1) tz = dt.timezone.utc f = lambda n: dt.datetime.fromtimestamp(self._t0", "'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4}, 'Bowen': {'id': '243', 'delta':", "expected time, composite it with a common background image, then overlay on the", "self._log.error(\"No 'delta' specified for radar ID %s, using %s\", radar_id, delta) if radar_id", "'Marburg': {'id': '503', 'delta': 600, 'frames': 4}, 'Melbourne': {'id': '023', 'delta': 360, 'frames':", "or 'ID %s' % radar_id self._delta = delta or RADARS[location]['delta'] self._frames = frames", "recent set of radar 
images to be used to create the animated GIF.", "frames, ignoring any blanks. If no frames were produced, return None (the caller", "path) return 'http://www.bom.gov.au/%s' % path def _get_wximg(self, time_str): ''' Return a radar weather", "current BOM radar-loop image. ''' now = int(time.time()) t1 = now - (now", "''' Return a radar weather image from the BOM website. Note that get_image()", "{'id': '313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4},", "locations are: %s)\", location, valids) if radar_id: if location in RADARS: radar_id =", "'313', 'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale':", "n in frame_numbers] def _get_url(self, path): # pylint: disable=no-self-use self._log.debug('Getting URL for path", "None if the image could not be fetched, so the caller must deal", "(0, 0)) return frames or None def _get_image(self, url): # pylint: disable=no-self-use '''", "must deal with that possibility. 
''' self._log.debug('Getting radar imagery for %s at %s',", "the image could not be fetched, so the caller must deal with that", "'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta': 360, 'frames': 6}, 'Katherine': {'id': '423', 'delta':", "import datetime as dt import io import logging import os import time import", "logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d' % radar_id valids = ', '.join(sorted(RADARS.keys()))", "self._outfile) return loop.getvalue() def _get_time_strs(self): ''' Return a list of strings representing YYYYMMDDHHMM", "'193', 'delta': 360, 'frames': 6}, 'Canberra': {'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon':", "%s)\", radar_id, valids) if radar_id and not delta: delta = 360 self._log.error(\"No 'delta'", "'frames': 4}, 'Hobart': {'id': '763', 'delta': 360, 'frames': 6}, 'Kalgoorlie': {'id': '483', 'delta':", "', '.join(sorted(RADARS.keys())) if not radar_id and location not in RADARS: location = 'Sydney'", "_get_loop(self): ''' Return an animated GIF comprising a set of frames, where each", "directory %s', outdir) try: with open(self._outfile, 'wb') as outfile: outfile.write(loop.getvalue()) except IOError: self._log.error('Could", "return None (the caller must expect this). ''' self._log.debug('Getting frames for %s at", "'locations', 'range'): self._log.debug('Getting %s for %s at %s', layer, self._location, self._t0) suffix1 =", "4}, 'Longreach': {'id': '563', 'delta': 600, 'frames': 4}, 'Mackay': {'id': '223', 'delta': 600,", "the background map, then the topography, locations (e.g. 
city names), and distance-from-radar range", "''' self._log.debug('Getting background for %s at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0", "rgba_img = image.convert('RGBA') image.close() return rgba_img return None def _get_legend(self): ''' Fetch the", "def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None): self._log = logger or logging.getLogger(__name__)", "'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer) url1 = self._get_url(suffix1) image = self._get_image(url1) if image is", "background for %s at %s', self._location, self._t0) suffix0 = 'products/radar_transparencies/IDR%s.background.png' url0 = self._get_url(suffix0", "'frames': 4}, 'Wyndham': {'id': '073', 'delta': 600, 'frames': 4}, 'Yarrawonga': {'id': '493', 'delta':", "'Dampier': {'id': '153', 'delta': 600, 'frames': 4}, 'Darwin': {'id': '633', 'delta': 360, 'frames':", "blanks. If no frames were produced, return None (the caller must expect this).", "image. ''' self._log.debug('Getting legend at %s', self._t0) url = self._get_url('products/radar_transparencies/IDR.legend.0.png') return self._get_image(url) def", "common background image, then overlay on the legend to produce a frame. Collect", "'023', 'delta': 360, 'frames': 6}, 'Mildura': {'id': '303', 'delta': 600, 'frames': 4}, 'Moree':", "'SellicksHill': {'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames':", "= io.BytesIO() frames = self._get_frames() if frames is not None: self._log.debug('Got %s frames", "outfile.write(loop.getvalue()) except IOError: self._log.error('Could not write image to %s', self._outfile) return loop.getvalue() def", "a set of frames, where each frame includes a background, one or more", "'delta': 360, 'frames': 6}, 'Newdegate': {'id': '383', 'delta': 360, 'frames': 6}, 'NorfolkIs': {'id':", "colorbar legend, and a radar image. 
''' self._log.info('Getting loop for %s at %s',", "{'id': '463', 'delta': 600, 'frames': 4}, 'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6},", "'delta': 600, 'frames': 4}, 'Bairnsdale': {'id': '683', 'delta': 600, 'frames': 4}, 'Bowen': {'id':", "background def _get_frames(self): ''' Fetch a radar image for each expected time, composite", "not delta: delta = 360 self._log.error(\"No 'delta' specified for radar ID %s, using", "bg = self._get_background() legend = self._get_legend() frames = [] if bg and legend:", "outfile self._t0 = 0 self._current = self.current # Public methods @property def current(self):", "= t1 self._current = self._get_loop() return self._current # Private methods def _get_background(self): '''", "is None: return None for layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s for", "4}, 'Ceduna': {'id': '333', 'delta': 600, 'frames': 4}, 'Dampier': {'id': '153', 'delta': 600,", "Collect and return the frames, ignoring any blanks. If no frames were produced,", "the most recent set of radar images to be used to create the", "import time import PIL.Image import requests RADARS = { 'Adelaide': {'id': '643', 'delta':", "GIF. 
''' self._log.debug('Getting time strings starting at %s', self._t0) frame_numbers = range(self._frames, 0,", "return None for layer in ('topography', 'locations', 'range'): self._log.debug('Getting %s for %s at", "io.BytesIO() frames = self._get_frames() if frames is not None: self._log.debug('Got %s frames for", "= PIL.Image.alpha_composite(background, image) return background def _get_frames(self): ''' Fetch a radar image for", "dt import io import logging import os import time import PIL.Image import requests", "'delta': 600, 'frames': 4}, 'AliceSprings': {'id': '253', 'delta': 600, 'frames': 4}, 'Bairnsdale': {'id':", "format='GIF', loop=0, save_all=True) else: self._log.warning('Got NO frames for %s at %s', self._location, self._t0)", "360 self._log.error(\"No 'delta' specified for radar ID %s, using %s\", radar_id, delta) if", "frame includes a background, one or more supplemental layers, a colorbar legend, and", "'403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600, 'frames': 4}, 'Ceduna':", "'frames': 4}, 'Longreach': {'id': '563', 'delta': 600, 'frames': 4}, 'Mackay': {'id': '223', 'delta':", "n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M') return [f(n) for n in frame_numbers]", "logger=None): self._log = logger or logging.getLogger(__name__) if isinstance(radar_id, int): radar_id = '%03d' %", "%s (valid locations are: %s)\", radar_id, valids) if radar_id and not delta: delta", "6}, 'Broome': {'id': '173', 'delta': 600, 'frames': 4}, 'Cairns': {'id': '193', 'delta': 360,", "4}, 'Gove': {'id': '093', 'delta': 600, 'frames': 4}, 'Grafton': {'id': '283', 'delta': 600,", "%s', self._t0) frame_numbers = range(self._frames, 0, -1) tz = dt.timezone.utc f = lambda", "'frames': 6}, 'Weipa': {'id': '783', 'delta': 360, 'frames': 6}, 'WillisIs': {'id': '413', 'delta':", "600, 'frames': 4}, 'Moree': {'id': '533', 'delta': 600, 'frames': 4}, 'MorningtonIs': {'id': '363',", "radar_id, frames) self._location 
= location or 'ID %s' % radar_id self._delta = delta", "to create the animated GIF. ''' self._log.debug('Getting time strings starting at %s', self._t0)", "{'id': '483', 'delta': 360, 'frames': 6}, 'Katherine': {'id': '423', 'delta': 360, 'frames': 6},", "600, 'frames': 4}, 'Warruwi': {'id': '773', 'delta': 360, 'frames': 6}, 'Watheroo': {'id': '793',", "6}, 'Canberra': {'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta': 600,", "Return a list of strings representing YYYYMMDDHHMM times for the most recent set", "'733', 'delta': 600, 'frames': 4}, 'WaggaWagga': {'id': '553', 'delta': 600, 'frames': 4}, 'Warrego':", "%s frames for %s at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF',", "Return an animated GIF comprising a set of frames, where each frame includes", "None: self._log.debug('Got %s frames for %s at %s', len(frames), self._location, self._t0) frames[0].save(loop, append_images=frames[1:],", "{'id': '553', 'delta': 600, 'frames': 4}, 'Warrego': {'id': '673', 'delta': 600, 'frames': 4},", "'frames': 6}, 'Canberra': {'id': '403', 'delta': 360, 'frames': 6}, 'Carnarvon': {'id': '053', 'delta':", "'Grafton': {'id': '283', 'delta': 600, 'frames': 4}, 'Gympie': {'id': '083', 'delta': 360, 'frames':" ]
[ "is used, end the game, instead of showing errors print('Game Over') if __name__", "used, end the game, instead of showing errors print('Game Over') if __name__ ==", "and 2 for Computer: ') if player == '1': play(user_game) elif player ==", "try: print(func()) except ValueError: # If there was a value error, the function", "You and 2 for Computer: ') if player == '1': play(user_game) elif player", "user_game, comp_game def play(func): try: print(func()) except ValueError: # If there was a", "game, instead of showing errors print('Game Over') if __name__ == '__main__': player =", "When ctrl + c is used, end the game, instead of showing errors", "errors print('Game Over') if __name__ == '__main__': player = input('Who is guessing: 1", "except ValueError: # If there was a value error, the function will be", "again print('\\n Try Again') return play(func) except KeyboardInterrupt: # When ctrl + c", "ValueError: # If there was a value error, the function will be called", "== '__main__': player = input('Who is guessing: 1 for You and 2 for", "'__main__': player = input('Who is guessing: 1 for You and 2 for Computer:", "guessing: 1 for You and 2 for Computer: ') if player == '1':", "the function will be called again print('\\n Try Again') return play(func) except KeyboardInterrupt:", "of showing errors print('Game Over') if __name__ == '__main__': player = input('Who is", "print(func()) except ValueError: # If there was a value error, the function will", "ctrl + c is used, end the game, instead of showing errors print('Game", "a value error, the function will be called again print('\\n Try Again') return", "# When ctrl + c is used, end the game, instead of showing", "is guessing: 1 for You and 2 for Computer: ') if player ==", "== '1': play(user_game) elif player == '2': play(comp_game) else: print('Please enter a right", "# If there was a value error, the function will be called again", "player = input('Who is guessing: 1 for You and 2 for Computer: ')", "print('Game 
Over') if __name__ == '__main__': player = input('Who is guessing: 1 for", "def play(func): try: print(func()) except ValueError: # If there was a value error,", "c is used, end the game, instead of showing errors print('Game Over') if", "__name__ == '__main__': player = input('Who is guessing: 1 for You and 2", "error, the function will be called again print('\\n Try Again') return play(func) except", "input('Who is guessing: 1 for You and 2 for Computer: ') if player", "the game, instead of showing errors print('Game Over') if __name__ == '__main__': player", "Over') if __name__ == '__main__': player = input('Who is guessing: 1 for You", "1 for You and 2 for Computer: ') if player == '1': play(user_game)", "be called again print('\\n Try Again') return play(func) except KeyboardInterrupt: # When ctrl", "Again') return play(func) except KeyboardInterrupt: # When ctrl + c is used, end", "return play(func) except KeyboardInterrupt: # When ctrl + c is used, end the", "print('\\n Try Again') return play(func) except KeyboardInterrupt: # When ctrl + c is", "there was a value error, the function will be called again print('\\n Try", "play(func): try: print(func()) except ValueError: # If there was a value error, the", "KeyboardInterrupt: # When ctrl + c is used, end the game, instead of", "play(func) except KeyboardInterrupt: # When ctrl + c is used, end the game,", "from utils import user_game, comp_game def play(func): try: print(func()) except ValueError: # If", "value error, the function will be called again print('\\n Try Again') return play(func)", "called again print('\\n Try Again') return play(func) except KeyboardInterrupt: # When ctrl +", "showing errors print('Game Over') if __name__ == '__main__': player = input('Who is guessing:", "except KeyboardInterrupt: # When ctrl + c is used, end the game, instead", "if __name__ == '__main__': player = input('Who is guessing: 1 for You and", "2 for Computer: ') if player == '1': play(user_game) elif player == '2':", 
"Computer: ') if player == '1': play(user_game) elif player == '2': play(comp_game) else:", "if player == '1': play(user_game) elif player == '2': play(comp_game) else: print('Please enter", "comp_game def play(func): try: print(func()) except ValueError: # If there was a value", "end the game, instead of showing errors print('Game Over') if __name__ == '__main__':", "'1': play(user_game) elif player == '2': play(comp_game) else: print('Please enter a right value')", "import user_game, comp_game def play(func): try: print(func()) except ValueError: # If there was", "for Computer: ') if player == '1': play(user_game) elif player == '2': play(comp_game)", "') if player == '1': play(user_game) elif player == '2': play(comp_game) else: print('Please", "will be called again print('\\n Try Again') return play(func) except KeyboardInterrupt: # When", "= input('Who is guessing: 1 for You and 2 for Computer: ') if", "for You and 2 for Computer: ') if player == '1': play(user_game) elif", "player == '1': play(user_game) elif player == '2': play(comp_game) else: print('Please enter a", "function will be called again print('\\n Try Again') return play(func) except KeyboardInterrupt: #", "Try Again') return play(func) except KeyboardInterrupt: # When ctrl + c is used,", "+ c is used, end the game, instead of showing errors print('Game Over')", "was a value error, the function will be called again print('\\n Try Again')", "If there was a value error, the function will be called again print('\\n", "instead of showing errors print('Game Over') if __name__ == '__main__': player = input('Who", "utils import user_game, comp_game def play(func): try: print(func()) except ValueError: # If there" ]
[ "== 0 assert get_indice(' ') == 8 assert get_indice('J') == 3 def test_converte():", "entrada_valida('') == False def test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ') ==", "GHI') == True assert entrada_valida('') == False def test_get_indice(): assert get_indice('A') == 0", "False def test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ') == 8 assert", "* def test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True assert", "entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False def", "') == 8 assert get_indice('J') == 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777'", "assert entrada_valida('') == False def test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ')", "get_indice('A') == 0 assert get_indice(' ') == 8 assert get_indice('J') == 3 def", "0 assert get_indice(' ') == 8 assert get_indice('J') == 3 def test_converte(): assert", "False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False def test_get_indice(): assert", "assert get_indice(' ') == 8 assert get_indice('J') == 3 def test_converte(): assert converte('PUZZLES')", "get_indice('J') == 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777' assert converte('OI PESSOAL') ==", "entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False def test_get_indice(): assert get_indice('A') ==", "assert get_indice('A') == 0 assert get_indice(' ') == 8 assert get_indice('J') == 3", "assert get_indice('J') == 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777' assert converte('OI PESSOAL')", "def test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ') == 8 assert get_indice('J')", "assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False", "get_indice(' ') == 8 assert get_indice('J') == 3 def 
test_converte(): assert converte('PUZZLES') ==", "assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False def test_get_indice(): assert get_indice('A')", "== True assert entrada_valida('') == False def test_get_indice(): assert get_indice('A') == 0 assert", "test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ') == 8 assert get_indice('J') ==", "dojo import * def test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') ==", "== 8 assert get_indice('J') == 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777' assert", "test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') ==", "== 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777' assert converte('OI PESSOAL') == '66644407337777_77776662555'", "== False def test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ') == 8", "== False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False def test_get_indice():", "from dojo import * def test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI')", "def test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('')", "True assert entrada_valida('') == False def test_get_indice(): assert get_indice('A') == 0 assert get_indice('", "8 assert get_indice('J') == 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777' assert converte('OI", "import * def test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True" ]
[ "= list(), list() for a in range(1, n): res = primality.miller_rabin(n=n, a=a) if", "list() for a in range(1, n): res = primality.miller_rabin(n=n, a=a) if res ==", "for a in range(1, n): res = primality.miller_rabin(n=n, a=a) if res == \"composite\":", "witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" + str(witnesses)) print(str(len(nonwitnesses)) + \" nonwitnesses:\"", "def miller_rabin(n): witnesses, nonwitnesses = list(), list() for a in range(1, n): res", "miller_rabin(n): witnesses, nonwitnesses = list(), list() for a in range(1, n): res =", "res = primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) +", "witnesses, nonwitnesses = list(), list() for a in range(1, n): res = primality.miller_rabin(n=n,", "a in range(1, n): res = primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a)", "a=a) if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" +", "nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" + str(witnesses)) print(str(len(nonwitnesses)) + \" nonwitnesses:\" + str(nonwitnesses))", "primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\"", "n): res = primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses))", "list(), list() for a in range(1, n): res = primality.miller_rabin(n=n, a=a) if res", "\"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" + str(witnesses)) print(str(len(nonwitnesses)) + \"", "<gh_stars>1-10 import primality def miller_rabin(n): witnesses, nonwitnesses = list(), list() for a in", "if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" 
witnesses:\" + str(witnesses))", "nonwitnesses = list(), list() for a in range(1, n): res = primality.miller_rabin(n=n, a=a)", "primality def miller_rabin(n): witnesses, nonwitnesses = list(), list() for a in range(1, n):", "else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" + str(witnesses)) print(str(len(nonwitnesses)) + \" nonwitnesses:\" +", "in range(1, n): res = primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a) else:", "= primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \"", "range(1, n): res = primality.miller_rabin(n=n, a=a) if res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a)", "== \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" + str(witnesses)) print(str(len(nonwitnesses)) +", "import primality def miller_rabin(n): witnesses, nonwitnesses = list(), list() for a in range(1,", "res == \"composite\": witnesses.append(a) else: nonwitnesses.append(a) print(str(len(witnesses)) + \" witnesses:\" + str(witnesses)) print(str(len(nonwitnesses))" ]
[ "rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way =", "ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: #", "1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else:", "rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way < 2: # pointless :-) return logging.debug(\"Splitting", "if new_way.id != way.id: for point in points: point.removeparent(way) point.addparent(new_way) return new_ways def", "def __get_layer_fields(self, layer): layer_fields = [] layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()):", "y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True) if previous_node_id ==", "tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D", "ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags", "ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject)", "= node.id return way def __parse_polygon(self, ogrgeometry, tags): # Special case polygons with", "accompany any distribution of this code. 
import logging from osgeo import ogr from", "return node def __add_way(self, tags): way = OsmWay(tags) self.__ways.append(way) return way def __add_relation(self,", "geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif", "any distribution of this code. import logging from osgeo import ogr from osgeo", "datawriter def __enter__(self): self.datawriter.open() return self.datawriter def __exit__(self, exception_type, value, traceback): self.datawriter.close() def", "in zip(new_ways, new_points): new_way.points = points if new_way.id != way.id: for point in", "ring. This does not (or at least # should not) change behavior when", "the source data attributes # and passes them to the filter_tags function, returning", "else need special casing? geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D", "# Released under the MIT license, as given in the file LICENSE, which", "logging.debug(\"Splitting long ways\") for way in self.__ways: is_way_in_relation = len([ p for p", "] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for linestring", "and passes them to the filter_tags function, returning the result. 
def __get_feature_tags(self, ogrfeature,", "{}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags)", "in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True)", "if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [", "if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way", "[ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon,", "1) ] if not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points", "self.__ways.append(new_way) for new_way, points in zip(new_ways, new_points): new_way.points = points if new_way.id !=", "= self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node =", "ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation", "[ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif", "= self.__add_way(tags) # LineString.GetPoint() returns a tuple, so we can't call parsePoint on", "type(p) == OsmRelation ]) > 0 if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way,", "with no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points)", "new_ways[1:]: self.__ways.append(new_way) for new_way, points in 
zip(new_ways, new_points): new_way.points = points if new_way.id", "[ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]:", "[ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries =", "Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The University of Vermont", "\\ for i in range(0, len(way.points), self.max_points_in_way - 1) ] new_ways = [", "ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint() returns", "[ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags))", "tags)) return geometries else: relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member =", "self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i)", "__parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags): way", "!= node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return way def __parse_polygon(self, ogrgeometry, tags):", "self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0),", "== ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return", 
"field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function builds up a dictionary with the", "return int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member): rx = self.__round_number(x)", "ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]:", "self.__add_way(tags) # LineString.GetPoint() returns a tuple, so we can't call parsePoint on it", "= None if is_way_member: unique_node_id = (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry,", "z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True) if previous_node_id == None", "ogrgeometry, tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint,", "way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way < 2: #", "in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes)", "def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {} for (index, field_name, field_type) in", "new_points = [ way.points[i:i + self.max_points_in_way] \\ for i in range(0, len(way.points), self.max_points_in_way", "LineString.GetPoint() returns a tuple, so we can't call parsePoint on it # and", "= ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1:", "i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior,", "] new_ways = [ way ] + [ OsmWay(way.get_tags()) for i in 
range(len(new_points)", "way_parts): way_roles = [ m[1] for m in rel.members if m[0] == way_parts[0]", "traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as dw: dw.write_header(self.__bounds)", "0 else way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if", "__parse_geometry to avoid second loop for osmgeometry in [ geom for geom in", "which must # accompany any distribution of this code. import logging from osgeo", "osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self, translation,", "ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way] \\ for", "is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way] \\ for i in range(0, len(way.points),", "OsmWay, OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation", "__add_way(self, tags): way = OsmWay(tags) self.__ways.append(way) return way def __add_relation(self, tags): relation =", "in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ]", "data attributes # and passes them to the filter_tags function, returning the result.", "to osm multipolygon, so special case it # TODO: Does anything else need", "def __split_way_in_relation(self, rel, way_parts): way_roles = [ m[1] for m in rel.members if", "p for p in way.get_parents() if type(p) == OsmRelation ]) > 0 if", "x, y, tags, is_way_member): rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id = None", "datawriter): self.datawriter = datawriter def __enter__(self): 
self.datawriter.open() return self.datawriter def __exit__(self, exception_type, value,", "contributors # Released under the MIT license, as given in the file LICENSE,", "%s\" % str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda", "ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry,", "OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation =", "= ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self,", "None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None: return reproject(ogrgeometry)", "unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] =", "reproject = lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is", "[ m[1] for m in rel.members if m[0] == way_parts[0] ] way_role =", "def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as dw: dw.write_header(self.__bounds) dw.write_nodes(self.__nodes) dw.write_ways(self.__ways)", "can't call parsePoint on it # and instead have to create the point", "layer: layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields,", 
"rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes = []", "to avoid second loop for osmgeometry in [ geom for geom in osmgeometries", "]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon,", "layer_fields, source_encoding): tags = {} for (index, field_name, field_type) in layer_fields: field_value =", "= self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom =", "OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False)", "tags): # OGR MultiPolygon maps easily to osm multipolygon, so special case it", "relation = OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(),", "builds up a dictionary with the source data attributes # and passes them", "self.__nodes.append(node) return node def __add_way(self, tags): way = OsmWay(tags) self.__ways.append(way) return way def", "range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i", "self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {})", "geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation", "special case it 
# TODO: Does anything else need special casing? geometry_type =", "ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter):", "__init__(self, datawriter): self.datawriter = datawriter def __enter__(self): self.datawriter.open() return self.datawriter def __exit__(self, exception_type,", "is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points in zip(new_ways, new_points): new_way.points", "way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles = [ m[1] for m in rel.members", "in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D", "* 10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member): rx = self.__round_number(x) ry =", "geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() >", "interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ]", "for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points in zip(new_ways, new_points): new_way.points =", "self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ] def __parse_geometry(self, ogrgeometry, tags):", "when simplify relations is turned on. if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no", "distribution of this code. 
import logging from osgeo import ogr from osgeo import", "= \"\" if len(way_roles) == 0 else way_roles[0] for way in way_parts[1:]: way.addparent(rel)", "0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags)", "ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]]", "ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def __parse_collection(self, ogrgeometry,", "layer_fields, source_encoding) if feature_tags is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries =", "attributes # and passes them to the filter_tags function, returning the result. def", "__parse_polygon(self, ogrgeometry, tags): # Special case polygons with only one ring. This does", "TODO: Does anything else need special casing? 
geometry_type = ogrgeometry.GetGeometryType() if geometry_type in", "node.addparent(way) previous_node_id = node.id return way def __parse_polygon(self, ogrgeometry, tags): # Special case", "# OGR MultiPolygon maps easily to osm multipolygon, so special case it #", "<<EMAIL>>, github contributors # Released under the MIT license, as given in the", "member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries =", "field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function builds up", "reproject) = datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature", "2: # pointless :-) return logging.debug(\"Splitting long ways\") for way in self.__ways: is_way_in_relation", "self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation)", "value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as dw:", "tuple, so we can't call parsePoint on it # and instead have to", "field_def.GetType())) return layer_fields # This function builds up a dictionary with the source", "# ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D", "# TODO performance: run in __parse_geometry to avoid second loop for osmgeometry in", "None or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return way def", "elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # 
ogr.wkbLinearRing25D does not exist", "if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points = [", "<NAME> <<EMAIL>>, The University of Vermont # <<EMAIL>>, github contributors # Released under", "return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run", "n): return int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member): rx =", "TODO performance: run in __parse_geometry to avoid second loop for osmgeometry in [", "filter_tags function, returning the result. def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {}", "ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ] else:", "i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {},", "self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return", "return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None): ogrfilteredfeature", "osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type", "]: geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else:", "parsePoint on it # and instead have to create the point ourself previous_node_id", "] + [ 
OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ] if not", "node = self.__add_node(x, y, {}, True) if previous_node_id == None or previous_node_id !=", "# Special case polygons with only one ring. This does not (or at", "= None for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node =", "way.id: for point in points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts): new_relation", "performance: run in __parse_geometry to avoid second loop for osmgeometry in [ geom", "j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager:", "datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter def __enter__(self):", "in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation = self.__add_relation(tags) for i in", "self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry", "logging from osgeo import ogr from osgeo import osr from .osm_geometries import OsmBoundary,", "[] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation = self.__add_relation(tags)", "(way, \"outer\") for way in way_parts ] for way in way_parts: way.addparent(new_relation) def", "= datawriter def __enter__(self): self.datawriter.open() return self.datawriter def __exit__(self, exception_type, value, traceback): self.datawriter.close()", "[ (way, \"outer\") for way in way_parts ] for way in way_parts: 
way.addparent(new_relation)", "rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i in range(datasource.get_layer_count()): (layer,", "for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class", "relation = self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no", "= layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return", "in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles = [ m[1] for m", "self.__round_number(y) unique_node_id = None if is_way_member: unique_node_id = (rx, ry) else: unique_node_id =", "from osgeo import osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData:", "if previous_node_id == None or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id", "= [ m[1] for m in rel.members if m[0] == way_parts[0] ] way_role", "{}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior", "else: relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation)", "geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint,", "This does not (or at least # should not) change behavior when simplify", "way_parts): new_relation = 
self.__add_relation({}) new_relation.members = [ (way, \"outer\") for way in way_parts", "geometries else: relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {})", "field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def", "]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D,", "self.datawriter = datawriter def __enter__(self): self.datawriter.open() return self.datawriter def __exit__(self, exception_type, value, traceback):", "tags, is_way_member): rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id = None if is_way_member:", "None relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation)", "(index, field_name, field_type) in layer_fields: field_value = '' if field_type == ogr.OFTString: field_value", "return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) #", "in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring),", "relation def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry,", "geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags)", "<NAME> <<EMAIL>>, # <NAME> 
<<EMAIL>>, The University of Vermont # <<EMAIL>>, github contributors", "# options self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds =", "self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags)", "- 1) ] new_ways = [ way ] + [ OsmWay(way.get_tags()) for i", "node def __add_way(self, tags): way = OsmWay(tags) self.__ways.append(way) return way def __add_relation(self, tags):", "self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes", "OsmWay(tags) self.__ways.append(way) return way def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation", "-*- # Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The University", "relation def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps easily to osm multipolygon,", "ring?\") return None relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i),", "Does anything else need special casing? 
geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [", "ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [", "(minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n):", "__round_number(self, n): return int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member): rx", "1: relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior =", "% str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry:", "in the file LICENSE, which must # accompany any distribution of this code.", "loop for osmgeometry in [ geom for geom in osmgeometries if geom ]:", "layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter def", "in layer_fields: field_value = '' if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else:", "polygons with only one ring. 
This does not (or at least # should", "OGR MultiPolygon maps easily to osm multipolygon, so special case it # TODO:", "points in zip(new_ways, new_points): new_way.points = points if new_way.id != way.id: for point", "a tuple, so we can't call parsePoint on it # and instead have", "return geometries else: relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i),", "feature_tags) # TODO performance: run in __parse_geometry to avoid second loop for osmgeometry", "\"member\")) return [ relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type", "= self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ] else: return [", "datasource): for i in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer: layer_fields =", "self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel", "a dictionary with the source data attributes # and passes them to the", "= self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in __parse_geometry to avoid second loop", "osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does", "{} for (index, field_name, field_type) in layer_fields: field_value = '' if field_type ==", "relation.members.append((interior, \"inner\")) return [ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif", "way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i in range(datasource.get_layer_count()): (layer, reproject) =", "i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) 
layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This", "result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation", "result else: relation = self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon", "if not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents():", "\"inner\")) return [ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type", "tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags) try:", "== OsmRelation ]) > 0 if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation)", "if self.max_points_in_way < 2: # pointless :-) return logging.debug(\"Splitting long ways\") for way", "ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()):", "logging.warning(\"Polygon with no exterior ring?\") return None relation.members.append((exterior, \"outer\")) for i in range(1,", "or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return way def __parse_polygon(self,", "simplify relations is turned on. 
if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\")", "tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) =", "ogrgeometry, tags): # OGR MultiPolygon maps easily to osm multipolygon, so special case", "way in way_parts ] for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts):", "layer_fields = [] layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i)", "on it # and instead have to create the point ourself previous_node_id =", "tags): way = OsmWay(tags) self.__ways.append(way) return way def __add_relation(self, tags): relation = OsmRelation(tags)", "= self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for", "new_way, points in zip(new_ways, new_points): new_way.points = points if new_way.id != way.id: for", "no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) >", "= self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if", "] if not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points in", "with no exterior ring?\") return None relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()):", "way, is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way] \\ for i in range(0,", "the file LICENSE, which must # accompany any distribution of this code. 
import", "previous_node_id = node.id return way def __parse_polygon(self, ogrgeometry, tags): # Special case polygons", "= self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\"))", "] for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles = [", "= layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function builds up a", "self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in __parse_geometry to avoid", "ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding)", "+ [ OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ] if not is_way_in_relation:", "from osgeo import ogr from osgeo import osr from .osm_geometries import OsmBoundary, OsmPoint,", "it # and instead have to create the point ourself previous_node_id = None", "for i in range(len(new_points) - 1) ] if not is_way_in_relation: for new_way in", "# <NAME> <<EMAIL>>, The University of Vermont # <<EMAIL>>, github contributors # Released", "feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None: return reproject(ogrgeometry) if self.add_bounds:", "special casing? 
geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if", "ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0)", "class DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter def __enter__(self): self.datawriter.open() return self.datawriter", "points if new_way.id != way.id: for point in points: point.removeparent(way) point.addparent(new_way) return new_ways", "else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx,", "= datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature =", "This function builds up a dictionary with the source data attributes # and", "if type(p) == OsmRelation ]) > 0 if len(way.points) > self.max_points_in_way: way_parts =", "tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags): way = OsmWay(tags)", "in range(0, len(way.points), self.max_points_in_way - 1) ] new_ways = [ way ] +", "the result. def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {} for (index, field_name,", "must # accompany any distribution of this code. import logging from osgeo import", "= (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index:", "is turned on. 
if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount()", "ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type", "is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO", "def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps easily to osm multipolygon, so", "return result else: relation = self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except:", "new_points): new_way.points = points if new_way.id != way.id: for point in points: point.removeparent(way)", "field_name, field_type) in layer_fields: field_value = '' if field_type == ogr.OFTString: field_value =", "maxx, miny, maxy) def __round_number(self, n): return int(round(n * 10**self.rounding_digits)) def __add_node(self, x,", "should not) change behavior when simplify relations is turned on. 
if ogrgeometry.GetGeometryCount() ==", "so we can't call parsePoint on it # and instead have to create", "= [] layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i,", "points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members =", "return way def __parse_polygon(self, ogrgeometry, tags): # Special case polygons with only one", "ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection,", "of Vermont # <<EMAIL>>, github contributors # Released under the MIT license, as", "elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result)", "\"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {})", "ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry):", "return layer_fields # This function builds up a dictionary with the source data", "self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint()", "else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]:", "in points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, 
way_parts): new_relation = self.__add_relation({}) new_relation.members", "= [ way.points[i:i + self.max_points_in_way] \\ for i in range(0, len(way.points), self.max_points_in_way -", "code. import logging from osgeo import ogr from osgeo import osr from .osm_geometries", "so special case it # TODO: Does anything else need special casing? geometry_type", "relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom,", "None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance:", "self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index", "# This function builds up a dictionary with the source data attributes #", "= ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n): return int(round(n * 10**self.rounding_digits))", "tags, False) def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint() returns a", "with only one ring. 
This does not (or at least # should not)", "if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id]", "= OsmBoundary() self.__nodes = [] self.__unique_node_index = {} self.__ways = [] self.__relations =", "self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\")", "add_bounds self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index = {} self.__ways = []", "= max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index =", "range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return", "10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member): rx = self.__round_number(x) ry = self.__round_number(y)", "ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return", "ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\" %", "way.points.append(node) node.addparent(way) previous_node_id = node.id return way def __parse_polygon(self, ogrgeometry, tags): # Special", "from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7,", "= set() def __get_layer_fields(self, layer): layer_fields = [] layer_def = layer.GetLayerDefn() for i", 
"self.max_points_in_way < 2: # pointless :-) return logging.debug(\"Splitting long ways\") for way in", "add_bounds=False): # options self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds", "need special casing? geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]:", "return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields,", "new_ways = [ way ] + [ OsmWay(way.get_tags()) for i in range(len(new_points) -", "def __add_node(self, x, y, tags, is_way_member): rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id", "not (or at least # should not) change behavior when simplify relations is", "self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i)", "] def __parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type", "= self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry)", "= ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny,", "source_encoding) if feature_tags is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry,", "new_relation.members = [ (way, \"outer\") for way in way_parts ] for way in", "__split_way_in_relation(self, rel, way_parts): way_roles = [ m[1] for m in rel.members if m[0]", "[] self.__long_ways_from_polygons = 
set() def __get_layer_fields(self, layer): layer_fields = [] layer_def = layer.GetLayerDefn()", "self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny,", "{}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries", "if len(way_roles) == 0 else way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role))", "for i in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer)", "= len([ p for p in way.get_parents() if type(p) == OsmRelation ]) >", "self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways()", "it # TODO: Does anything else need special casing? geometry_type = ogrgeometry.GetGeometryType() if", "[] layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(),", "__exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter)", "len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags): way = OsmWay(tags) self.__ways.append(way) return way", "instead have to create the point ourself previous_node_id = None for i in", "call parsePoint on it # and instead have to create the point ourself", "this code. 
import logging from osgeo import ogr from osgeo import osr from", "translation self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary()", "m[0] == way_parts[0] ] way_role = \"\" if len(way_roles) == 0 else way_roles[0]", "__merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members = [ (way, \"outer\") for way in", "rel, way_parts): way_roles = [ m[1] for m in rel.members if m[0] ==", "# <<EMAIL>>, github contributors # Released under the MIT license, as given in", "tags) ] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for", "ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True) if previous_node_id == None or previous_node_id", "relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom,", "= [ way ] + [ OsmWay(way.get_tags()) for i in range(len(new_points) - 1)", "polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\"))", "None for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x,", "{}, True) if previous_node_id == None or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id", "ourself previous_node_id = None for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i)", "osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]:", "source_encoding, reproject = lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, 
layer_fields, reproject) if ogrfilteredfeature", "for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i in range(datasource.get_layer_count()):", "github contributors # Released under the MIT license, as given in the file", "self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter", "zip(new_ways, new_points): new_way.points = points if new_way.id != way.id: for point in points:", "# pointless :-) return logging.debug(\"Splitting long ways\") for way in self.__ways: is_way_in_relation =", "self.__nodes = [] self.__unique_node_index = {} self.__ways = [] self.__relations = [] self.__long_ways_from_polygons", "self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer): layer_fields = [] layer_def = layer.GetLayerDefn() for", "geometry, type %s\" % str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject", "]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i +", "else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def", "__add_node(self, x, y, tags, is_way_member): rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id =", "= field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope()", "len([ p for p in way.get_parents() if type(p) == OsmRelation ]) > 0", "import logging from osgeo import ogr from osgeo import osr from .osm_geometries import", "max_points_in_way=1800, add_bounds=False): # options self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way = 
max_points_in_way", "if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] =", "= [] self.__relations = [] self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer): layer_fields =", "to create the point ourself previous_node_id = None for i in range(ogrgeometry.GetPointCount()): (x,", "geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i", "self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i", "relation.members.append((interior, \"inner\")) return relation def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps easily", "> 0 if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation:", "easily to osm multipolygon, so special case it # TODO: Does anything else", "ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way] \\", "osm multipolygon, so special case it # TODO: Does anything else need special", "ry, tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y,", "p in way.get_parents() if type(p) == OsmRelation ]) > 0 if len(way.points) >", "MIT license, as given in the file LICENSE, which must # accompany any", "casing? 
geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount()", "rel.members if m[0] == way_parts[0] ] way_role = \"\" if len(way_roles) == 0", "import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False):", "return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries", "(c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The University of Vermont #", "self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way]", "ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in", "len(way_roles) == 0 else way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def", "def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation = translation self.rounding_digits =", ".osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800,", "self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\") return None relation.members.append((exterior, \"outer\"))", "given in the file LICENSE, which must # accompany any distribution of this", "ogrgeometry is None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None:", "None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags = 
self.__get_feature_tags(ogrfilteredfeature,", "else: relation = self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with", "maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n): return", "= self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\") return None relation.members.append((exterior,", "relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior,", "try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\") return", "# Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The University of", "if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def", "relation.members.append((member, \"member\")) return [ relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries = []", "way_role)) def split_long_ways(self): if self.max_points_in_way < 2: # pointless :-) return logging.debug(\"Splitting long", "we can't call parsePoint on it # and instead have to create the", "ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags)", "= ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if", "if ogrgeometry is None: return 
feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is", "geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation):", "self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries", "] way_role = \"\" if len(way_roles) == 0 else way_roles[0] for way in", "else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields,", "= self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ] def __parse_geometry(self, ogrgeometry,", "\"\" if len(way_roles) == 0 else way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way,", "datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature()", "= layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter", "[] self.__relations = [] self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer): layer_fields = []", "for (index, field_name, field_type) in layer_fields: field_value = '' if field_type == ogr.OFTString:", "under the MIT license, as given in the file LICENSE, which must #", "(layer, reproject) = datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()):", "member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ] def 
__parse_geometry(self,", "ry = self.__round_number(y) unique_node_id = None if is_way_member: unique_node_id = (rx, ry) else:", "[] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags))", "is None: return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None: return", "self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in __parse_geometry to", "self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index = {} self.__ways", "= OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags,", "if is_way_member: unique_node_id = (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if", "way_parts ] for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles =", "field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx,", "'' if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name]", "elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for linestring in", "in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString,", "way.get_parents() if type(p) == OsmRelation ]) > 0 if len(way.points) > self.max_points_in_way: way_parts", "__enter__(self): self.datawriter.open() return self.datawriter 
def __exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self, datawriter):", "exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\") return None", "no exterior ring?\") return None relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior", "not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel,", "self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index = {} self.__ways = [] self.__relations", "for i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return", "passes them to the filter_tags function, returning the result. def __get_feature_tags(self, ogrfeature, layer_fields,", "int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation", "< 2: # pointless :-) return logging.debug(\"Splitting long ways\") for way in self.__ways:", "\"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\"))", "second loop for osmgeometry in [ geom for geom in osmgeometries if geom", "__get_layer_fields(self, layer): layer_fields = [] layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def", "self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags): way = OsmWay(tags) self.__ways.append(way)", "way_parts: way.addparent(new_relation) def 
__split_way_in_relation(self, rel, way_parts): way_roles = [ m[1] for m in", "geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member", "[ geom for geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def", "= {} for (index, field_name, field_type) in layer_fields: field_value = '' if field_type", "= OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags):", "m in rel.members if m[0] == way_parts[0] ] way_role = \"\" if len(way_roles)", "relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member,", "new_way.id != way.id: for point in points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self,", "ogrfeature, layer_fields, source_encoding): tags = {} for (index, field_name, field_type) in layer_fields: field_value", "== 0 else way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self):", "return feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding) if feature_tags is None: return reproject(ogrgeometry) if", "OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options", "for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields #", "in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = 
self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\"))", "= [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation =", "for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles = [ m[1]", "OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ] if not is_way_in_relation: for new_way", "self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = []", "University of Vermont # <<EMAIL>>, github contributors # Released under the MIT license,", "return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx,", "(x, y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True) if previous_node_id", "on. if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() == 1:", "def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members = [ (way, \"outer\") for way", "in way_parts ] for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles", "if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in __parse_geometry", "= '' if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index)", "change behavior when simplify relations is turned on. 
if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon", "for i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [", "in new_ways[1:]: self.__ways.append(new_way) for new_way, points in zip(new_ways, new_points): new_way.points = points if", "if m[0] == way_parts[0] ] way_role = \"\" if len(way_roles) == 0 else", "__parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint() returns a tuple, so we", "else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i in", "if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation =", "license, as given in the file LICENSE, which must # accompany any distribution", "self.__ways = [] self.__relations = [] self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer): layer_fields", "layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function builds up a dictionary with", "layer): layer_fields = [] layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def =", "OsmRelation ]) > 0 if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if", "relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [ ogr.wkbMultiLineString,", "= self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation =", "maxy) def __round_number(self, n): return int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y, tags,", "tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, 
ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D,", "i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation", "way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way < 2: # pointless :-) return", "in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for", "= self.__round_number(x) ry = self.__round_number(y) unique_node_id = None if is_way_member: unique_node_id = (rx,", "not) change behavior when simplify relations is turned on. if ogrgeometry.GetGeometryCount() == 0:", "for new_way, points in zip(new_ways, new_points): new_way.points = points if new_way.id != way.id:", "is_way_in_relation = len([ p for p in way.get_parents() if type(p) == OsmRelation ])", "# and instead have to create the point ourself previous_node_id = None for", "= rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes =", "def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags):", "__get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {} for (index, field_name, field_type) in layer_fields:", "self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts)", "way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way < 2: # pointless :-)", "range(ogrgeometry.GetGeometryCount()): 
geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation = self.__add_relation(tags) for i in range(ogrgeometry.GetGeometryCount()):", "= ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ]", "if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result", "is_way_in_relation) if not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in", "in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource):", "maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n): return int(round(n *", "run in __parse_geometry to avoid second loop for osmgeometry in [ geom for", "reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter def __enter__(self): self.datawriter.open()", "layer_fields, source_encoding, reproject = lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if", "__calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def", "tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(),", "not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points in zip(new_ways, new_points):", "osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in 
__parse_geometry to avoid second", "def __enter__(self): self.datawriter.open() return self.datawriter def __exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self,", "[ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D", "the filter_tags function, returning the result. def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags =", "ways\") for way in self.__ways: is_way_in_relation = len([ p for p in way.get_parents()", "False) def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint() returns a tuple,", "return self.datawriter def __exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways,", "for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior,", "ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags))", "for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation)", "if layer: layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature,", "lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None: return", "= 
self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def __parse_collection(self, ogrgeometry, tags): #", "max_points_in_way self.add_bounds = add_bounds self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index = {}", "layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter =", "ogr from osgeo import osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class", "= self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject)", "of this code. import logging from osgeo import ogr from osgeo import osr", "# accompany any distribution of this code. import logging from osgeo import ogr", "to the filter_tags function, returning the result. 
def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags", "utf-8 -*- # Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The", "source_encoding): tags = {} for (index, field_name, field_type) in layer_fields: field_value = ''", "field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip()", "new_relation = self.__add_relation({}) new_relation.members = [ (way, \"outer\") for way in way_parts ]", "with the source data attributes # and passes them to the filter_tags function,", "exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom", "\\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\"", "and instead have to create the point ourself previous_node_id = None for i", "in self.__ways: is_way_in_relation = len([ p for p in way.get_parents() if type(p) ==", "way_parts) def process(self, datasource): for i in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if", "\\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled", "way def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry,", "field_value = '' if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value =", "def __add_way(self, tags): way = OsmWay(tags) 
self.__ways.append(way) return way def __add_relation(self, tags): relation", "anything else need special casing? geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbMultiPolygon,", "ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return", "= lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None:", "point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members = [ (way,", "# and passes them to the filter_tags function, returning the result. def __get_feature_tags(self,", "geom for geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self,", "DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter def __enter__(self): self.datawriter.open() return self.datawriter def", "case it # TODO: Does anything else need special casing? 
geometry_type = ogrgeometry.GetGeometryType()", "ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]:", "\"outer\") for way in way_parts ] for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self,", "in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way < 2: # pointless", "for point in points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts): new_relation =", "[ way.points[i:i + self.max_points_in_way] \\ for i in range(0, len(way.points), self.max_points_in_way - 1)", "return relation def __parse_point(self, ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self,", "node.id return way def __parse_polygon(self, ogrgeometry, tags): # Special case polygons with only", "i in range(0, len(way.points), self.max_points_in_way - 1) ] new_ways = [ way ]", "way ] + [ OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ] if", "field_value = ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx,", "previous_node_id = None for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node", "[ OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ] if not is_way_in_relation: for", "return [ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in", "turned on. 
if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() ==", "= self.__add_node(x, y, {}, True) if previous_node_id == None or previous_node_id != node.id:", "layer_def = layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType()))", "in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points", "[] self.__unique_node_index = {} self.__ways = [] self.__relations = [] self.__long_ways_from_polygons = set()", "the MIT license, as given in the file LICENSE, which must # accompany", "geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry,", "field_type) in layer_fields: field_value = '' if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding)", "self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x,", "2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The University of Vermont # <<EMAIL>>,", "case polygons with only one ring. 
This does not (or at least #", "y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags): way =", "\\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return", "range(0, len(way.points), self.max_points_in_way - 1) ] new_ways = [ way ] + [", "node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def __add_way(self,", "Special case polygons with only one ring. This does not (or at least", "rings?\") elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way:", "= len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags): way = OsmWay(tags) self.__ways.append(way) return", "OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node def __add_way(self, tags): way", "returns a tuple, so we can't call parsePoint on it # and instead", "relations is turned on. 
if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\") elif", "way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else:", "osgeo import osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def", "__split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way] \\ for i in", "ogr.wkbMultiLineString25D ]: geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries", "> self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0),", "= points if new_way.id != way.id: for point in points: point.removeparent(way) point.addparent(new_way) return", "self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as dw: dw.write_header(self.__bounds) dw.write_nodes(self.__nodes)", "linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation = self.__add_relation(tags) for i", "tags): way = self.__add_way(tags) # LineString.GetPoint() returns a tuple, so we can't call", "= [] self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer): layer_fields = [] layer_def =", "if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom =", "for geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way,", "osmgeometries if 
geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry) def __split_way(self, way, is_way_in_relation): new_points =", "in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def", "logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if", "= ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type", "output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as dw: dw.write_header(self.__bounds) dw.write_nodes(self.__nodes) dw.write_ways(self.__ways) dw.write_relations(self.__relations)", "coding: utf-8 -*- # Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>,", "result. 
def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {} for (index, field_name, field_type)", "self.__add_node(x, y, {}, True) if previous_node_id == None or previous_node_id != node.id: way.points.append(node)", "OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation = translation self.rounding_digits", "def __round_number(self, n): return int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member):", "layer_fields: field_value = '' if field_type == ogr.OFTString: field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding) else: field_value", "True) if previous_node_id == None or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id =", "return new_ways def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members = [ (way, \"outer\")", "layer_fields # This function builds up a dictionary with the source data attributes", "for p in way.get_parents() if type(p) == OsmRelation ]) > 0 if len(way.points)", "osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return osmgeometries def add_feature(self,", "self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds", "def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy)", "create the point ourself previous_node_id = None for i in range(ogrgeometry.GetPointCount()): (x, y,", "in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function", "function, returning the result. 
def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {} for", "behavior when simplify relations is turned on. if ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with", "def __parse_polygon(self, ogrgeometry, tags): # Special case polygons with only one ring. This", "if ogrfilteredfeature is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return", "new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points in zip(new_ways, new_points): new_way.points = points", "import osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self,", "i in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for", "point ourself previous_node_id = None for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) =", "= translation self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds self.__bounds =", "geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]: geometries = [] for linestring in range(ogrgeometry.GetGeometryCount()):", "self.datawriter.open() return self.datawriter def __exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes,", "ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [", "does not (or at least # should not) change behavior when simplify relations", "ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return osmgeometries", "ogrgeometry, tags): # Special case polygons with only one ring. 
This does not", "in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing,", "least # should not) change behavior when simplify relations is turned on. if", "exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()): int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i) interior =", "does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry,", "range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def __parse_collection(self,", "def __exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with", "the point ourself previous_node_id = None for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused)", "= [ (way, \"outer\") for way in way_parts ] for way in way_parts:", "!= way.id: for point in points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts):", "way_role = \"\" if len(way_roles) == 0 else way_roles[0] for way in way_parts[1:]:", "self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in __parse_geometry to avoid second loop for", "geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString,", "new_ways def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members = [ (way, \"outer\") for", "reproject) if ogrfilteredfeature is None: 
return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None:", "layer_fields = self.__get_layer_fields(layer) for j in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding,", "ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for polygon in", "in [ geom for geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry)", "] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D", "osgeo import ogr from osgeo import osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay,", "them to the filter_tags function, returning the result. def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding):", "is_way_member: unique_node_id = (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id", "ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()):", "osmgeometry in [ geom for geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry, ogrfilteredfeature,", "in range(len(new_points) - 1) ] if not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way)", "[ way ] + [ OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ]", "= add_bounds self.__bounds = OsmBoundary() self.__nodes = [] self.__unique_node_index = {} self.__ways =", "range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True) if", "way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) 
def split_long_ways(self): if self.max_points_in_way <", ":-) return logging.debug(\"Splitting long ways\") for way in self.__ways: is_way_in_relation = len([ p", "self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n): return int(round(n * 10**self.rounding_digits)) def __add_node(self,", "new_way.points = points if new_way.id != way.id: for point in points: point.removeparent(way) point.addparent(new_way)", "previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return way def __parse_polygon(self, ogrgeometry,", "have to create the point ourself previous_node_id = None for i in range(ogrgeometry.GetPointCount()):", "osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None): ogrfilteredfeature =", "return None relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {})", "- 1) ] if not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way,", "LICENSE, which must # accompany any distribution of this code. 
import logging from", "len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way in", "= [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]: osmgeometries.append(self.__parse_point(ogrgeometry,", "in way.get_parents() if type(p) == OsmRelation ]) > 0 if len(way.points) > self.max_points_in_way:", "ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else:", "= self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {})", "self.__ways: is_way_in_relation = len([ p for p in way.get_parents() if type(p) == OsmRelation", "exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif", "{}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\") return None relation.members.append((exterior, \"outer\")) for", "MultiPolygon maps easily to osm multipolygon, so special case it # TODO: Does", "type %s\" % str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject =", "m[1] for m in rel.members if m[0] == way_parts[0] ] way_role = \"\"", "<<EMAIL>>, The University of Vermont # <<EMAIL>>, github contributors # Released under the", "{} self.__ways = [] self.__relations = [] self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer):", "ogrfilteredfeature is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags", "def process(self, datasource): for i in 
range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer:", "The University of Vermont # <<EMAIL>>, github contributors # Released under the MIT", "layer.GetLayerDefn() for i in range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields", "layer_fields, reproject) if ogrfilteredfeature is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is", "__init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation = translation self.rounding_digits = rounding_digits", "+ self.max_points_in_way] \\ for i in range(0, len(way.points), self.max_points_in_way - 1) ] new_ways", "ogrgeometry): (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self,", "None if is_way_member: unique_node_id = (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags)", "def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags):", "== None or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return way", "self.__ways.append(way) return way def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation def", "[ relation ] else: return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ] elif geometry_type in [", "returning the result. 
def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding): tags = {} for (index,", "ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in", "ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type))", "else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else:", "way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel, way_parts): way_roles = [ m[1] for", "range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for j in", "self.split_long_ways() class DataWriterContextManager: def __init__(self, datawriter): self.datawriter = datawriter def __enter__(self): self.datawriter.open() return", "way_roles = [ m[1] for m in rel.members if m[0] == way_parts[0] ]", "for i in range(0, len(way.points), self.max_points_in_way - 1) ] new_ways = [ way", "not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags))", "for osmgeometry in [ geom for geom in osmgeometries if geom ]: self.translation.process_feature_post(osmgeometry,", "tags): # Special case polygons with only one ring. 
This does not (or", "in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not exist osmgeometries.append(self.__parse_linestring(ogrgeometry, tags))", "unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node", "Vermont # <<EMAIL>>, github contributors # Released under the MIT license, as given", "== way_parts[0] ] way_role = \"\" if len(way_roles) == 0 else way_roles[0] for", "source data attributes # and passes them to the filter_tags function, returning the", "-*- coding: utf-8 -*- # Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, # <NAME>", "ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry,", "import ogr from osgeo import osr from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation", "self.__relations = [] self.__long_ways_from_polygons = set() def __get_layer_fields(self, layer): layer_fields = [] layer_def", "return relation def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps easily to osm", "at least # should not) change behavior when simplify relations is turned on.", "reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) # TODO performance: run in", "for m in rel.members if m[0] == way_parts[0] ] way_role = \"\" if", "for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way < 2:", "self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return node", "== 1: 
result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags) if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result", "tags)) elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in", "tags) if unique_node_id in self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags)", "<<EMAIL>>, # <NAME> <<EMAIL>>, The University of Vermont # <<EMAIL>>, github contributors #", "ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n): return int(round(n * 10**self.rounding_digits)) def", "exterior ring?\") return None relation.members.append((exterior, \"outer\")) for i in range(1, ogrgeometry.GetGeometryCount()): interior =", "i in range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation", "i in range(len(new_points) - 1) ] if not is_way_in_relation: for new_way in new_ways[1:]:", "]: osmgeometries.append(self.__parse_point(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D", "options self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way = max_points_in_way self.add_bounds = add_bounds", "]) > 0 if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not", "previous_node_id == None or previous_node_id != node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return", "Released under the MIT license, as given in the file LICENSE, which must", "in range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) 
self.split_long_ways() class DataWriterContextManager: def", "elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D,", "(or at least # should not) change behavior when simplify relations is turned", "exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior ring?\") return None relation.members.append((exterior, \"outer\")) for i", "dictionary with the source data attributes # and passes them to the filter_tags", "is_way_member): rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id = None if is_way_member: unique_node_id", "None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None: return ogrgeometry =", "else way_roles[0] for way in way_parts[1:]: way.addparent(rel) rel.members.append((way, way_role)) def split_long_ways(self): if self.max_points_in_way", "def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint() returns a tuple, so", "len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags) try: exterior =", "class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation = translation", "maps easily to osm multipolygon, so special case it # TODO: Does anything", "for linestring in range(ogrgeometry.GetGeometryCount()): geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags)) return geometries else: relation = self.__add_relation(tags) for", "way_parts[0] ] way_role = \"\" if len(way_roles) == 0 else way_roles[0] for way", "is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef() if ogrgeometry is None: return feature_tags =", "= self.__round_number(y) unique_node_id = None if 
is_way_member: unique_node_id = (rx, ry) else: unique_node_id", "str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None):", "in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for i in range(datasource.get_layer_count()): (layer, reproject)", "way = OsmWay(tags) self.__ways.append(way) return way def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation)", "range(layer_def.GetFieldCount()): field_def = layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function builds", "node.id: way.points.append(node) node.addparent(way) previous_node_id = node.id return way def __parse_polygon(self, ogrgeometry, tags): #", "way def __parse_polygon(self, ogrgeometry, tags): # Special case polygons with only one ring.", "is_way_in_relation: if way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts)", "= ogrgeometry.GetPoint(i) node = self.__add_node(x, y, {}, True) if previous_node_id == None or", "(rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in self.__unique_node_index: return", "add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields,", "one ring. 
This does not (or at least # should not) change behavior", "as given in the file LICENSE, which must # accompany any distribution of", "point in points: point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({})", "OsmBoundary() self.__nodes = [] self.__unique_node_index = {} self.__ways = [] self.__relations = []", "up a dictionary with the source data attributes # and passes them to", "miny, maxy) def __round_number(self, n): return int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y,", "1) ] new_ways = [ way ] + [ OsmWay(way.get_tags()) for i in", "ogrgeometry, tags): way = self.__add_way(tags) # LineString.GetPoint() returns a tuple, so we can't", "interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def __parse_collection(self, ogrgeometry, tags):", "# TODO: Does anything else need special casing? geometry_type = ogrgeometry.GetGeometryType() if geometry_type", "# should not) change behavior when simplify relations is turned on. 
if ogrgeometry.GetGeometryCount()", "[ relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType()", "= OsmWay(tags) self.__ways.append(way) return way def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return", "= self.__add_relation(tags) try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation) except: logging.warning(\"Polygon with no exterior", "multipolygon, so special case it # TODO: Does anything else need special casing?", "exception_type, value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as", "geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None: return ogrgeometry", "OsmBoundary, OsmPoint, OsmWay, OsmRelation class OsmData: def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): #", "self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def __parse_collection(self, ogrgeometry, tags): # OGR", "\"inner\")) return relation def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps easily to", "self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self, datasource): for", "layer_def.GetFieldDefn(i) layer_fields.append((i, field_def.GetNameRef(), field_def.GetType())) return layer_fields # This function builds up a dictionary", "= self.__add_relation({}) new_relation.members = [ (way, \"outer\") for way in way_parts ] for", "self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags) 
try: exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {}) exterior.addparent(relation)", "process(self, datasource): for i in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer: layer_fields", "int(round(n * 10**self.rounding_digits)) def __add_node(self, x, y, tags, is_way_member): rx = self.__round_number(x) ry", "self.__add_relation({}) new_relation.members = [ (way, \"outer\") for way in way_parts ] for way", "unique_node_id = None if is_way_member: unique_node_id = (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx,", "def __parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in", "pointless :-) return logging.debug(\"Splitting long ways\") for way in self.__ways: is_way_in_relation = len([", "if len(result.points) > self.max_points_in_way: self.__long_ways_from_polygons.add(result) return result else: relation = self.__add_relation(tags) try: exterior", "if not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for new_way, points in zip(new_ways,", "rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id = None if is_way_member: unique_node_id =", "split_long_ways(self): if self.max_points_in_way < 2: # pointless :-) return logging.debug(\"Splitting long ways\") for", "== 0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0),", "]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return osmgeometries def", "return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node) return", "{}) interior.addparent(relation) relation.members.append((interior, \"inner\")) 
return relation def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon", "interior.addparent(relation) relation.members.append((interior, \"inner\")) return relation def __parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps", "self.max_points_in_way] \\ for i in range(0, len(way.points), self.max_points_in_way - 1) ] new_ways =", "__parse_collection(self, ogrgeometry, tags): # OGR MultiPolygon maps easily to osm multipolygon, so special", "datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations) with self.DataWriterContextManager(datawriter) as dw: dw.write_header(self.__bounds) dw.write_nodes(self.__nodes) dw.write_ways(self.__ways) dw.write_relations(self.__relations) dw.write_footer()", "# LineString.GetPoint() returns a tuple, so we can't call parsePoint on it #", "for way in self.__ways: is_way_in_relation = len([ p for p in way.get_parents() if", "ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\ ogr.wkbGeometryCollection25D ]: osmgeometries.extend(self.__parse_collection(ogrgeometry, tags)) else: logging.warning(\"Unhandled geometry, type", "way = self.__add_way(tags) # LineString.GetPoint() returns a tuple, so we can't call parsePoint", "= ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in range(1,", "file LICENSE, which must # accompany any distribution of this code. 
import logging", "def __split_way(self, way, is_way_in_relation): new_points = [ way.points[i:i + self.max_points_in_way] \\ for i", "return logging.debug(\"Splitting long ways\") for way in self.__ways: is_way_in_relation = len([ p for", "ogrgeometry, tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags): way =", "len(way.points), self.max_points_in_way - 1) ] new_ways = [ way ] + [ OsmWay(way.get_tags())", "self.max_points_in_way - 1) ] new_ways = [ way ] + [ OsmWay(way.get_tags()) for", "ogrfeature.GetFieldAsString(index) tags[field_name] = field_value.strip() return self.translation.filter_tags(tags) def __calc_bounds(self, ogrgeometry): (minx, maxx, miny, maxy)", "feature_tags is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags) #", "for way in way_parts ] for way in way_parts: way.addparent(new_relation) def __split_way_in_relation(self, rel,", "if feature_tags is None: return reproject(ogrgeometry) if self.add_bounds: self.__calc_bounds(ogrgeometry) osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags)", "logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature, layer_fields, source_encoding,", "unique_node_id = (rx, ry) else: unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags) if unique_node_id in", "self.__unique_node_index: return self.__nodes[self.__unique_node_index[unique_node_id]] else: node = OsmPoint(x, y, tags) self.__unique_node_index[unique_node_id] = len(self.__nodes) self.__nodes.append(node)", "0 if len(way.points) > self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if", "def __init__(self, datawriter): self.datawriter = datawriter def __enter__(self): self.datawriter.open() return self.datawriter def 
__exit__(self,", "[ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for polygon", "ogrgeometry.GetGeometryCount() == 0: logging.warning(\"Polygon with no rings?\") elif ogrgeometry.GetGeometryCount() == 1: result =", "in rel.members if m[0] == way_parts[0] ] way_role = \"\" if len(way_roles) ==", "y, {}, True) if previous_node_id == None or previous_node_id != node.id: way.points.append(node) node.addparent(way)", "return [ relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type =", "self.__unique_node_index = {} self.__ways = [] self.__relations = [] self.__long_ways_from_polygons = set() def", "y, tags, is_way_member): rx = self.__round_number(x) ry = self.__round_number(y) unique_node_id = None if", "# -*- coding: utf-8 -*- # Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>, #", "> 1: relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior", "tags)) else: logging.warning(\"Unhandled geometry, type %s\" % str(geometry_type)) return osmgeometries def add_feature(self, ogrfeature,", "range(len(new_points) - 1) ] if not is_way_in_relation: for new_way in new_ways[1:]: self.__ways.append(new_way) for", "def split_long_ways(self): if self.max_points_in_way < 2: # pointless :-) return logging.debug(\"Splitting long ways\")", "interior = self.__parse_linestring(int_geom, {}) interior.addparent(relation) relation.members.append((interior, \"inner\")) return [ relation ] else: return", "osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\", "way in self.__ways: is_way_in_relation = len([ p for p in way.get_parents() if type(p)", "way.points[i:i + self.max_points_in_way] \\ for i in range(0, len(way.points), 
self.max_points_in_way - 1) ]", "for i in range(ogrgeometry.GetPointCount()): (x, y, z_unused) = ogrgeometry.GetPoint(i) node = self.__add_node(x, y,", "= {} self.__ways = [] self.__relations = [] self.__long_ways_from_polygons = set() def __get_layer_fields(self,", "avoid second loop for osmgeometry in [ geom for geom in osmgeometries if", "range(ogrgeometry.GetGeometryCount()): member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {}) member.addparent(relation) relation.members.append((member, \"member\")) return [ relation ] def", "def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None): ogrfilteredfeature = self.translation.filter_feature(ogrfeature,", "ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\", "set() def __get_layer_fields(self, layer): layer_fields = [] layer_def = layer.GetLayerDefn() for i in", "except: logging.warning(\"Polygon with no exterior ring?\") return None relation.members.append((exterior, \"outer\")) for i in", "in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]: if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for", "elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]: osmgeometries.append(self.__parse_polygon(ogrgeometry, tags)) elif geometry_type in [", "return way def __add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self,", "tags): return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def __parse_linestring(self, ogrgeometry, tags): way = self.__add_way(tags)", "ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0) exterior = self.__parse_linestring(ext_geom, {}) exterior.addparent(relation) relation.members.append((exterior, \"outer\")) for i in", "geometry_type in [ ogr.wkbMultiPoint, 
ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \\ ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \\ ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \\", "relation ] def __parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if", "self.__round_number(x) ry = self.__round_number(y) unique_node_id = None if is_way_member: unique_node_id = (rx, ry)", "ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject) if ogrfilteredfeature is None: return ogrgeometry = ogrfilteredfeature.GetGeometryRef()", "<NAME>, <NAME> <<EMAIL>>, # <NAME> <<EMAIL>>, The University of Vermont # <<EMAIL>>, github", "in range(datasource.get_layer_count()): (layer, reproject) = datasource.get_layer(i) if layer: layer_fields = self.__get_layer_fields(layer) for j", "way in self.__long_ways_from_polygons: self.__merge_into_new_relation(way_parts) else: for rel in way.get_parents(): self.__split_way_in_relation(rel, way_parts) def process(self,", "miny, maxy) = ogrgeometry.GetEnvelope() self.__bounds.add_envelope(minx, maxx, miny, maxy) def __round_number(self, n): return int(round(n", "tags = {} for (index, field_name, field_type) in layer_fields: field_value = '' if", "function builds up a dictionary with the source data attributes # and passes", "point.removeparent(way) point.addparent(new_way) return new_ways def __merge_into_new_relation(self, way_parts): new_relation = self.__add_relation({}) new_relation.members = [", "range(layer.GetFeatureCount()): ogrfeature = layer.GetNextFeature() self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject) self.split_long_ways() class DataWriterContextManager: def __init__(self,", "__add_relation(self, tags): relation = OsmRelation(tags) self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags): return", "self.__relations.append(relation) return relation def __parse_point(self, ogrgeometry, tags): return 
self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False) def", "long ways\") for way in self.__ways: is_way_in_relation = len([ p for p in", "tags)) elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]: # ogr.wkbLinearRing25D does not", "translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False): # options self.translation = translation self.rounding_digits = rounding_digits self.max_points_in_way", "> self.max_points_in_way: way_parts = self.__split_way(way, is_way_in_relation) if not is_way_in_relation: if way in self.__long_ways_from_polygons:", "self.datawriter def __exit__(self, exception_type, value, traceback): self.datawriter.close() def output(self, datawriter): self.translation.process_output(self.__nodes, self.__ways, self.__relations)", "only one ring. This does not (or at least # should not) change", "__parse_geometry(self, ogrgeometry, tags): osmgeometries = [] geometry_type = ogrgeometry.GetGeometryType() if geometry_type in [", "]: if ogrgeometry.GetGeometryCount() > 1: relation = self.__add_relation(tags) for polygon in range(ogrgeometry.GetGeometryCount()): ext_geom", "= [] self.__unique_node_index = {} self.__ways = [] self.__relations = [] self.__long_ways_from_polygons =", "in __parse_geometry to avoid second loop for osmgeometry in [ geom for geom" ]
[ "of freedom of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace", "data given our new parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track the", "Note that this method only works for the 1-D fused lasso case.''' if", "grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta']))", "< converge_threshold # Update step-size parameter based on norm of primal and dual", "cached LU decomposition.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers')", "# Update constraint term r arg = s - t_dual local_lambda = (_lambda", "approximation...') # Cache the exponentiated beta exp_beta = np.exp(beta) # Form the parameters", "# Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i] =", "= [] # ''' Load the graph fused lasso library ''' # graphfl_lib", "= initial_values['z'] # u = initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y,", "_lambda / a) dual_residual = a * D.T.dot(z_new - z) z = z_new", "'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve", "np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in", "* signal_dist.pdf(data) + (1. 
- c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform", "plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum()", "verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if", "or bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus = plateaus # Save the", "fused lasso library that has an optimized ADMM routine.''' if verbose: print('\\t\\tSolving via", "jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev))", "+ 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist,", "c_trace = [] results_trace = [] best_idx = None best_plateaus = None flat_data", "z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge,", "* Dx + (1 - alpha) * z # over-relax Dx z_new =", "= len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the naive", "index as being checked so it's not re-checked unnecessarily check_map[local_idx] = True #", "= None best_plateaus = None flat_data = data.flatten() edges = penalties[3] if dual_solver", "= aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] -", "None: # beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u", "1d or 2d grid of beta values in linear time.''' to_check = deque(itertools.product(*[range(x)", "self.signal_dist, self.null_dist) for idx in p: weights[idx if grid_map is None else grid_map[idx[0],", "(s_new - s) z = z_new s = s_new # Dual update primal_residual_x", "sequential least squares u = 
self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif", "# Check every possible boundary of the plateau while cur_unchecked: idx = cur_unchecked.popleft()", "u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the current iterate", "L = D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator = W_over_a + L", "in exchange for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a", "0.5 Kd = np.array([a] + [2*a] * (n-2) + [a]) + W #", "in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx if", "def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters", "Ku, Kd, W * y + out) Dx = np.ediff1d(x) # Update z", "self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100,", "the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) *", "matrix, represented as a vector for efficiency weights = 0.5 * exp_beta /", "prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of the data given", "print('\\t\\tSolving u via Coordinate Descent') u = u0 if u0 is not None", "- s) z = z_new s = s_new # Dual update primal_residual_x =", "_lambda, converge, max_steps, verbose, u0=None): '''Solve for u using coordinate descent.''' if verbose:", "{1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic':", "post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of the", "u = (u + a * 
primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm =", "a * D.T.dot(z_new - z) z = z_new primal_residual = Dx_relaxed - z", "using coordinate descent.''' if verbose: print('\\t\\tSolving u via Coordinate Descent') u = u0", "if u0 is not None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r", "max_member = val + rel_tol # Check every possible boundary of the plateau", "print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold)", "self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'),", "counter cur_step += 1 if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep", "# Check convergence primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] + [i for", "BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx],", "(prev_nll + converge) if verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the", "+ [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold", "as np from scipy import sparse from scipy.stats import norm from scipy.optimize import", "!= 'graph': # Back out beta from the dual solution beta = y", "if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(),", "Coordinate Descent') u = u0 if u0 is not None else np.zeros(A.shape[1]) l2_norm_A", "= np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters", "= _u_objective_func(u, x, 
A) cur_step = 0 while delta > converge and cur_step", "np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a + L) # Update the step counter", "np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm", "/ a * weights * y + D.T.dot(a * z - u) x", "post_prob = signal_weight / (signal_weight + null_weight) return post_prob def _m_step(self, beta, prior_prob,", "verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob =", "elif len(beta.shape) == 1: if idx[0] > 0: local_check.append(idx[0] - 1) # left", "step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a) #x_denominator", "def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u", "Set up system involving graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights /", "2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) # Calculate", "post_prob, beta): '''Calculate the log-likelihood of the betas given the weights and data.'''", "- (1. 
/ weights) * penalties.T.dot(u) # Get the current log-likelihood cur_nll =", "trace of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF:", "Update step-size parameter based on norm of primal and dual residuals # This", "z_new = _soft_threshold(Dx_relaxed + u / a, _lambda / a) dual_residual = a", "len(y) m = n - 1 a = _lambda # The D matrix", "self.null_dist) for idx in p: weights[idx if grid_map is None else grid_map[idx[0], idx[1]]]", "null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob):", "unless edges are specified explicitly. ({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors", "dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and", "= sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood term z =", "is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus =", "Dx - z #primal_residual = Dx_hat - z # Update u u =", "the final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge,", "beta_trace = [] u_trace = [] w_trace = [] c_trace = [] results_trace", "csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x", "primal_resnorm: a /= inflate u_dual *= inflate t_dual *= inflate # Update the", "tridiagonal representation # of K. 
Kd = np.array([a] + [2*a] * (n-2) +", "r_accel = alpha * r + (1 - alpha) * s # Projection", "system involving graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator", "# Update step-size parameter based on norm of primal and dual residuals #", "Create our box constraints bounds = [(-_lambda, _lambda) for _ in u0] #", "scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return", "#{0}'.format(len(plateaus) + 1)) # Get the next unchecked point on the grid idx", "beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC:", "(np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the best model thus far if", "2. * dof_trace[i] - 2. * log_likelihood_trace[i] # Calculate AICc = AIC +", "dof_trace[i] - 1.) # Calculate BIC = -2ln(L) + k * (ln(n) -", "initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z,", "- np.log(2 * np.pi)) # Track the best model thus far if best_idx", "- A.T[coord] * u[coord] # Track the change in the objective function value", "x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] =", "neighbors for local_idx in local_check: if not check_map[local_idx] \\ and beta[local_idx] >= min_member", "# Loop until every beta index has been checked while to_check: if verbose", "tracker prev_nll = cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the", "Clear out all the info from the previous run self.reset() # Fit to", "verbose=False): '''Perform unpenalized 1-d regression for each of the plateaus.''' weights = np.zeros(data.shape)", "of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace = []", "= 
initial_values['z'] r = initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual =", "if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1) # right # 2d", "= [] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] # '''", "n = len(y) m = D.shape[0] a = inflate * _lambda # step-size", "for penalty u_dual = np.zeros(n) # scaled dual variable for constraint x =", "the step counter cur_step += 1 if verbose and cur_step % 100 ==", "alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat + u / a, _lambda", "Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter and update the previous", "_u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for u using sequential least squares.'''", "lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda value", "* self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight) return post_prob def _m_step(self,", "The AICc score for each lambda value (correcting for finite sample size) bic_trace", "1d and 2d cases for now else: raise Exception('Degrees of freedom calculation does", "(k+1) / (n - k - 1) aicc_trace[i] = aic_trace[i] + 2 *", "alpha, inflate, max_steps, converge, beta, z, u) # return {'beta': beta, 'z': z,", "penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties,", "partial from collections import deque from pygfl.solver import TrailSolver class GaussianKnown: ''' A", "# Use the naive DoF if verbose: print('Calculating AIC') # Get the negative", "admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the data with the given", "counter cur_step += 1 # Update the negative log-likelihood tracker prev_nll = cur_nll", "return {'beta': beta, 'z': z, 'u': u } self.solver.alpha = alpha self.solver.inflate =", "np.sqrt(weights) * y A = (1. 
/ np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights =", "= 0 while delta > converge and cur_step < max_steps: if verbose >", "the exponentiated beta exp_beta = np.exp(beta) # Form the parameters for our weighted", "t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if adaptive else _lambda r", "u_dual = u_dual + primal_residual_x t_dual = t_dual + primal_residual_r # Check convergence", "np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda value", "+ rel_tol # Check every possible boundary of the plateau while cur_unchecked: idx", "initial_values = results # Save the trace of all the resulting parameters beta_trace.append(results['beta'])", "# Increment the step counter and update the previous objective value cur_step +=", "cur_unchecked = deque([idx]) val = beta[idx] min_member = val - rel_tol max_member =", "idx[0] > 0: local_check.append(idx[0] - 1) # left if idx[0] < beta.shape[0] -", "case if edges is not None: local_check.extend(edges[idx]) # 1d case -- check left", "primal_resnorm > 5 * dual_resnorm: a *= inflate u_dual /= inflate t_dual /=", "if grid_map is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else:", "least squares if dual_solver != 'admm' and dual_solver != 'graph': # weights is", "prev_nll) / (prev_nll + converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track the step", "all the indices without reaching an unchecked one. 
if check_map[idx]: break # Create", "csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()]", "right elif len(beta.shape) == 1: if idx[0] > 0: local_check.append(idx[0] - 1) #", "= [(-_lambda, _lambda) for u0_i in u0] results = minimize(_u_objective_func, u0, args=(x, A),", "Laplacian L = D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator = W_over_a +", "= [] dual_trace = [] converged = False cur_step = 0 while not", "idx = cur_unchecked.popleft() # neighbors to check local_check = [] # Generic graph", "= u_dual + primal_residual_x t_dual = t_dual + primal_residual_r # Check convergence primal_resnorm", "[] self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001,", "{1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for", "> 5 * primal_resnorm: a /= inflate u_dual *= inflate t_dual *= inflate", "{0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob, delta) # Increment the step", "def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha,", "1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist,", "alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x): return 1. / (1. + np.exp(-x))", "max_steps: if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the", "more than 2 dimensions unless edges are specified explicitly. ({0} given)'.format(len(beta.shape))) # Check", "verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset of grid points for this plateau", "D) # where W is the diagonal matrix of weights. 
We use a", "/= inflate t_dual /= inflate elif dual_resnorm > 5 * primal_resnorm: a /=", "np.abs(prev_objective - cur_objective) / (prev_objective + converge) if verbose and cur_step % 100", "{'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D,", "= np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta'] # z = initial_values['z']", "u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u))", "varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights /", "step-size # TODO: is this worth it? We're paying a matrix inverse in", "norm of primal and dual residuals a *= 2 if primal_resnorm > 10", "check left, right, up, and down elif len(beta.shape) == 2: if idx[0] >", "lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx]))", "constraint set arg = x_accel + u_dual + D.T.dot(r_accel + t_dual) z_new =", "= self._m_log_likelihood(post_prob, beta) delta = converge + 1 u = u0 cur_step =", "is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in", "null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on all of the", "_graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate,", "the data with the given penalty matrix.''' delta = converge + 1 if", "_u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for u using coordinate", "freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of the run return {'beta':", "the vector of betas if grid_map is not None: grid_points[grid_map != -1] =", "initial_values['c'] u = initial_values['u'] prev_nll = 
0 cur_step = 0 while delta >", "# above the diagonal cur_step += 1 if verbose and cur_step % 100", "on all of the points in a plateau.''' return minimize_scalar(plateau_loss_func, args=(data, signal_dist, null_dist),", "weights vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find beta", "u using a super fast graph fused lasso library that has an optimized", "for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p:", "of freedom') # Create a grid structure out of the vector of betas", "inflate t_dual *= inflate # Update the step counter cur_step += 1 if", "decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood term", "and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm,", "initial_values['u'] primal_trace = [] dual_trace = [] converged = False cur_step = 0", "'''Solve for u using sequential least squares.''' if verbose: print('\\t\\tSolving u via Sequential", "If we already have checked this one, just pop it off while to_check", "cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution", "1: if idx[0] > 0: local_check.append(idx[0] - 1) # left if idx[0] <", "(1 - prior_prob)) y = beta - (prior_prob - post_prob) / weights print(weights)", "u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2],", "= initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace = [] converged =", "given penalty matrix.''' delta = converge + 1 if initial_values is None: beta", "possible boundary of the plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to", "__init__(self, mean, stdev): self.mean = mean self.stdev = 
stdev def pdf(self, data): return", "to the final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps,", "results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.'''", "t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights,", ">= min_member \\ and beta[local_idx] <= max_member: # Label this index as being", "Over-relaxation z_new = _soft_threshold(Dx_hat + u / a, _lambda / a) dual_residual =", "dof_trace[i] - 2. * log_likelihood_trace[i] # Calculate AICc = AIC + 2k *", "u } self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge =", "W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction", "penalties[3] if dual_solver == 'graph' else None if grid_data is not None: grid_points", "matrix inverse in exchange for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights /", "z - u) x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z", "u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B') if u0 is None:", "dof_tolerance).sum() + 1 # Use the naive DoF if verbose: print('Calculating AIC') #", "* self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data", "+ 1, idx[1])) # right if idx[1] > 0: local_check.append((idx[0], idx[1] - 1))", "_u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve", "cur_plateau)) # Returns the list of plateaus and their values return plateaus def", "+= 1 if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm:", "pygfl.solver import TrailSolver class 
GaussianKnown: ''' A simple Gaussian distribution with known mean", "solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000,", "penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta']", "Fused Lasso') # if initial_values is None: # beta = np.zeros(y.shape, dtype='double') #", "+= 1 prev_objective = cur_objective return u def _u_slsqp(self, x, A, _lambda, verbose,", "while delta > converge and cur_step < max_steps: if verbose > 1: print('\\t\\tM-Step", "inflate, initial_values=None): '''Solve for u using a super fast graph fused lasso library", "to_check.popleft() # If we already have checked this one, just pop it off", "= 0 while not converged and cur_step < max_steps: # Update x out", "The degrees of freedom of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace =", "'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def", "* post_prob / exp_beta + beta - (1 + exp_beta) if verbose >", "elif dual_solver == 'sls': # Solve the dual via sequential least squares u", "u via Sequential Least Squares') if u0 is None: u0 = np.zeros(A.shape[1]) #", "print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the likelihood weights vector (E-step) post_prob", "the results of the run return {'beta': beta, 'u': u, 'w': post_prob, 'c':", "x = np.array([y.mean()] * n) # likelihood term z = np.zeros(n) # slack", "weights) weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x", "and stdev. 
''' def __init__(self, mean, stdev): self.mean = mean self.stdev = stdev", "[{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of", "else 0.5 Kd = np.array([a] + [2*a] * (n-2) + [a]) + W", "the final run parameters to use for warm-starting the next iteration initial_values =", "a /= inflate u_dual *= inflate t_dual *= inflate # Update the step", "+ np.exp(beta)) u = initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c'] u", "AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for", "# graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int #", "value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score", "u = np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta'] # z =", "s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor}", "= len(y) m = D.shape[0] a = _lambda # step-size parameter # Set", "else None if grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan", "u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and", "dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None):", "arg = s - t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if", "Load the graph fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight", "t_dual + primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for i in 
primal_residual_x]", "Descent') u = u0 if u0 is not None else np.zeros(A.shape[1]) l2_norm_A =", "a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in", "< max_steps: # Update x x = (weights * y + a *", "converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of", "dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob = ilogit(beta)", "index has been checked while to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) +", "beta using an alternating Taylor approximation and convex optimization (M-step) beta, u =", "null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x", "u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose", "data.''' return (np.log(1 + np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y, weights,", "A.T[coord] * prev_u - A.T[coord] * u[coord] # Track the change in the", "[(-_lambda, _lambda) for u0_i in u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv,", "= self._m_log_likelihood(post_prob, beta) # Track the convergence delta = np.abs(prev_nll - cur_nll) /", "converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None,", "the solution path of the generalized lasso to find the best lambda value.'''", "w_trace = [] c_trace = [] results_trace = [] best_idx = None best_plateaus", "alternating Taylor approximation and convex optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob,", "c[i])) beta.append((f[i] 
- a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1])", "u = initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails,", "penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive,", "u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose,", "verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression", "post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return", "admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob = ilogit(beta) cur_nll =", "beta - (prior_prob - post_prob) / weights print(weights) print(y) if dual_solver == 'cd':", "dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the current", "- post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges,", "< max_steps: # Update x x_numerator = 1.0 / a * weights *", "function value cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective) /", "negative log-likelihood function for a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. 
-", "= Dx_relaxed - z # Update u u = u + a *", "Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0]", "use for warm-starting the next iteration initial_values = results # Save the trace", "- alpha) * z # over-relax Dx z_new = _soft_threshold(Dx_relaxed + u /", "lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score for", "= (1+exp_beta)**2 * post_prob / exp_beta + beta - (1 + exp_beta) if", "max_steps: # Update x out = _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku,", "dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) # Calculate BIC =", "% 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof", "reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters = []", "+ (1 - alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat + u", "1 prev_objective = _u_objective_func(u, x, A) cur_step = 0 while delta > converge", "coord in range(len(u)): prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord]", "for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset of", "for each lambda value (correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape) #", "lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of each final", "cur_step = 0 while delta > converge and cur_step < max_steps: if verbose:", "L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal and dual variables if", "prev_objective = _u_objective_func(u, x, A) cur_step = 0 while delta > converge and", "run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, 
verbose=0, dual_solver='graph', admm_alpha=1.,", "dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the", "!= 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of", "y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z, u)", "+ converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob,", "= np.sum((y - x)**2) + 2 * dof return {'x': x, 'z': z,", "<gh_stars>1-10 import itertools import numpy as np from scipy import sparse from scipy.stats", "(1-D fused lasso)') n = len(y) m = n - 1 a =", "'''Solve for u using alternating direction method of multipliers. Note that this method", "and dual residuals a *= 2 if primal_resnorm > 10 * dual_resnorm else", "lambda value (correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC", "r_accel - s u_dual = u_dual + primal_residual_x t_dual = t_dual + primal_residual_r", "D.dot(z_new) dual_residual_u = a * (z_new - z) dual_residual_t = a * (s_new", "in u0] # Fit results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp':", "Check convergence primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] + [i for i", "u = self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'lbfgs':", "# Get the negative log-likelihood of the data given our new parameters cur_nll", "1 # Update the negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING if", "cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif", "/ a) dual_residual = a * D.T.dot(z_new - z) z = z_new primal_residual", "primal and dual residuals a *= 2 if primal_resnorm > 10 * dual_resnorm", "exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta + beta - (1 +", 
"down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) # up", "optimized ADMM routine.''' if verbose: print('\\t\\tSolving via Graph Fused Lasso') # if initial_values", "verbose > 1, u0=u) elif dual_solver == 'admm': # Solve the dual via", "method of multipliers with a cached LU decomposition.''' if verbose: print('\\t\\tSolving u via", "x_accel - z primal_residual_r = r_accel - s u_dual = u_dual + primal_residual_x", "(weights * y + a * (z - u_dual)) / (weights + a)", "probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': #", "# Solve the dual via coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge,", "## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0] n =", "bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus = plateaus # Save the final", "self.mean = mean self.stdev = stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev)", "def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint", "_lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction", "u0=None): '''Solve for u using coordinate descent.''' if verbose: print('\\t\\tSolving u via Coordinate", "Dx = np.ediff1d(x) # Update z Dx_hat = alpha * Dx + (1", "i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x): return", "# below the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal", "to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix", "least squares u = self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif dual_solver", "max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of 
multipliers.", "adaptive=False): '''Solve for u using alternating direction method of multipliers with a cached", "primal_trace = [] dual_trace = [] converged = False cur_step = 0 D_full", "Dx_hat - z # Update u u = (u + a * primal_residual).clip(-_lambda,", "# Track the change in the objective function value cur_objective = _u_objective_func(u, x,", "an unchecked one. if check_map[idx]: break # Create the plateau and calculate the", "/ np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob * (1 - prior_prob)) y", "= results # Save the trace of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u'])", "= np.sqrt((np.array([i for i in dual_residual_u] + [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm)", "initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of the generalized lasso to find", "Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor =", "A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False):", "initial_values['beta'] # z = initial_values['z'] # u = initial_values['u'] # n = y.shape[0]", "u via Alternating Direction Method of Multipliers') n = len(y) m = D.shape[0]", "= np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx", "= np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product", "lasso to find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace", "def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return", "np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def 
_u_slsqp_constraint_func(idx,", "= D.shape[0] a = inflate * _lambda # step-size parameter # Initialize primal", "ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'),", "of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of the run return", "converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order", "Lambda = {1}'.format(i, _lambda)) # Clear out all the info from the previous", "that has an optimized ADMM routine.''' if verbose: print('\\t\\tSolving via Graph Fused Lasso')", "use a tridiagonal representation # of K. Kd = np.array([a] + [2*a] *", "collections import deque from pygfl.solver import TrailSolver class GaussianKnown: ''' A simple Gaussian", "# If we already have checked this one, just pop it off while", "iterate and coordinate descent to optimize Beta. 
''' prev_nll = self._m_log_likelihood(post_prob, beta) delta", "[(-_lambda, _lambda) for _ in u0] # Fit results = minimize(_u_objective_func, u0, args=(x,", "have checked this one, just pop it off while to_check and check_map[idx]: try:", "and down elif len(beta.shape) == 2: if idx[0] > 0: local_check.append((idx[0] - 1,", "local_check.append((idx[0], idx[1] + 1)) # up # Only supports 1d and 2d cases", "a * (z_new - z) dual_residual_t = a * (s_new - s) z", "y + out) Dx = np.ediff1d(x) # Update z Dx_hat = alpha *", "results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver,", "self._m_log_likelihood(post_prob, beta) # Track the convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll", "lasso case.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers (1-D", "edges are specified explicitly. 
({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors for", "t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a * (z_new -", "# beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u =", "prior_prob) if dual_solver == 'admm': # Get the negative log-likelihood of the data", "z t_dual = np.zeros(m) # scaled dual variable for constraint r = s", "# self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double,", "objective value cur_step += 1 prev_objective = cur_objective return u def _u_slsqp(self, x,", "ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int,", "of primal and dual residuals a *= 2 if primal_resnorm > 10 *", "constraints bounds = [(-_lambda, _lambda) for _ in u0] # Fit results =", "grid_map=None): '''Follows the solution path of the generalized lasso to find the best", "the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] =", "= deque(itertools.product(*[range(x) for x in beta.shape])) if edges is None else deque(range(len(beta))) check_map", "unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns", "u0 = np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for", "local_check.extend(edges[idx]) # 1d case -- check left and right elif len(beta.shape) == 1:", "verbose: print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) #", "+= 1 # Update the negative log-likelihood tracker prev_nll = cur_nll return beta,", "/ (prev_nll + converge) if verbose > 1: print('\\t\\tM-step 
delta: {0}'.format(delta)) # Increment", "= prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight /", "model thus far if best_idx is None or bic_trace[i] < bic_trace[best_idx]: best_idx =", "the given penalty matrix.''' delta = converge + 1 if initial_values is None:", "> converge and cur_step < max_steps: # Update each coordinate one at a", "bounds = [(-_lambda, _lambda) for u0_i in u0] results = minimize(_u_objective_func, u0, args=(x,", "2d case -- check left, right, up, and down elif len(beta.shape) == 2:", "def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x):", "_m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the betas given the weights and", "iteration initial_values = results # Save the trace of all the resulting parameters", "converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter based on norm of", "# over-relax Dx z_new = _soft_threshold(Dx_relaxed + u / a, _lambda / a)", "the index's unchecked neighbors for local_idx in local_check: if not check_map[local_idx] \\ and", "_lambda, verbose > 1, u0=u) elif dual_solver == 'lbfgs': # Solve the dual", "0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the", "an optimized ADMM routine.''' if verbose: print('\\t\\tSolving via Graph Fused Lasso') # if", "alpha * Dx + (1 - alpha) * z # over-relax Dx z_new", "(1 - alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat + u /", "value cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective) / (prev_objective", "'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8,", "cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # 
self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int,", "+ a) x_accel = alpha * x + (1 - alpha) * z", "to_check and check_map[idx]: try: idx = to_check.popleft() except: break # Edge case --", "(n-1)) # above the diagonal # Initialize primal and dual variables if initial_values", "def tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0] n = len(f) x =", "bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc:", "TrailSolver class GaussianKnown: ''' A simple Gaussian distribution with known mean and stdev.", "verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using alternating direction method of", "print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >=", "< max_steps: # Update each coordinate one at a time. for coord in", "beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph':", "= calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() +", "signal_weight / (signal_weight + null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties,", "- cur_nll) / (prev_nll + converge) if verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta))", "_lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None):", "delta) # Increment the step counter cur_step += 1 # Update the negative", "converge_threshold # Update step-size parameter based on norm of primal and dual residuals", "of multipliers with a cached LU 
decomposition.''' if verbose: print('\\t\\tSolving u via Alternating", "+ primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] +", "verbose, alpha, inflate, initial_values=None): '''Solve for u using a super fast graph fused", "constraints bounds = [(-_lambda, _lambda) for u0_i in u0] results = minimize(_u_objective_func, u0,", "np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if verbose: print('\\tCalculating plateaus...') if", "dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z': z, 's': s, 'u_dual': u_dual,", "if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated", "1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step counter cur_step += 1 #", "z, u) # return {'beta': beta, 'z': z, 'u': u } self.solver.alpha =", "inflate * _lambda # step-size parameter # Initialize primal and dual variables from", "of grid points for this plateau if grid_map is not None: plateau_data =", "converge and cur_step < max_steps: if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor", "plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function for a plateau.''' return -np.log(c", "# Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence", "a) r_accel = alpha * r + (1 - alpha) * s #", "paying a matrix inverse in exchange for varying the step size #W_over_a =", "= [] self.delta_iters = [] # ''' Load the graph fused lasso library", "{2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc':", "dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the data with", "= np.zeros(m) 
else: x = initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace", "the diagonal matrix of weights. We use a tridiagonal representation # of K.", "u = u + a * primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual", "len(f) x = [0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i]))", "set arg = x_accel + u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg)", "print('\\t\\tSolving u via Alternating Direction Method of Multipliers (1-D fused lasso)') n =", "vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find beta using", "*= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 Kd = np.array([a]", "graph case if edges is not None: local_check.extend(edges[idx]) # 1d case -- check", "edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use", "Increment the step counter and update the previous objective value cur_step += 1", "Kl = np.array([-a] * (n-1)) # below the diagonal Ku = np.array([-a] *", "parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc: {2}", "if we've converged delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge) if", "= -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) #", "= initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace =", "deque from pygfl.solver import TrailSolver class GaussianKnown: ''' A simple Gaussian distribution with", "= to_check.popleft() # If we already have checked this one, just pop it", "for u0_i in u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP',", "case -- If we went through all the indices without reaching an unchecked", "Track the best model thus far if best_idx 
is None or bic_trace[i] <", "s) z = z_new s = s_new # Dual update primal_residual_x = x_accel", "a) x_accel = alpha * x + (1 - alpha) * z #", "and convex optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge,", "- alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat + u / a,", "if initial_values is None: # beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1],", "np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r = x - A.dot(u) delta =", "sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a + L)", "next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r", "naive DoF if verbose: print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i] =", "adaptive else _lambda r = _soft_threshold(arg, local_lambda / a) r_accel = alpha *", "verbose: print('\\tE-step...') # Get the likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob)", "* beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps,", "in local_check: if not check_map[local_idx] \\ and beta[local_idx] >= min_member \\ and beta[local_idx]", "* _lambda # step-size parameter # Initialize primal and dual variables from warm", "= val + rel_tol # Check every possible boundary of the plateau while", "# ''' Load the graph fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so')", "via Coordinate Descent') u = u0 if u0 is not None else np.zeros(A.shape[1])", "primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual", "z Dx_relaxed = alpha * Dx + (1 - alpha) * z #", "deque(itertools.product(*[range(x) for x in beta.shape])) if edges is None else deque(range(len(beta))) check_map =", "if best_idx is None or bic_trace[i] 
< bic_trace[best_idx]: best_idx = i best_plateaus =", "ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z, u) # return", "multipliers with a cached LU decomposition.''' if verbose: print('\\t\\tSolving u via Alternating Direction", "+ 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) #", "degrees of freedom') # Create a grid structure out of the vector of", "cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max()))", "constraint term r arg = s - t_dual local_lambda = (_lambda - np.abs(arg)", "step-size parameter # Set up system involving graph Laplacian L = D.T.dot(D) W_over_a", "1-D fused lasso case.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of", "return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self,", "u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message))", "in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out all", "prior_prob) if verbose: print('\\tM-step...') # Find beta using an alternating Taylor approximation and", "# Create our box constraints bounds = [(-_lambda, _lambda) for _ in u0]", "prior_prob)) y = beta - (prior_prob - post_prob) / weights print(weights) print(y) if", "self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate,", "_lambda) for u0_i in u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds,", "# Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x 
= np.array([y.mean()] *", "def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f},", "return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class", "admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data,", "= [] self.c_iters = [] self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4,", "Lasso') # if initial_values is None: # beta = np.zeros(y.shape, dtype='double') # z", "idx[1]]] = w posteriors = self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors)", "{'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x,", "- x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th value of", "return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx])", "_lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate,", "else: raise Exception('Degrees of freedom calculation does not currently support more than 2", "for now else: raise Exception('Degrees of freedom calculation does not currently support more", "beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the betas given", "initial_values['t_dual'] primal_trace = [] dual_trace = [] converged = False cur_step = 0", "bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient", "= np.exp(beta) / (1 + np.exp(beta)) u = initial_values else: beta = initial_values['beta']", "prior_prob, delta) # Increment the step counter 
cur_step += 1 # Update the", "self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters =", "plateau if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p])", "supports 1d and 2d cases for now else: raise Exception('Degrees of freedom calculation", "0: local_check.append((idx[0] - 1, idx[1])) # left if idx[0] < beta.shape[0] - 1:", "# 2d case -- check left, right, up, and down elif len(beta.shape) ==", "is None: u0 = np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda,", "[] if verbose: print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices to check {0}", "inverse in exchange for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a))", "return {'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob):", "= initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r = initial_values['r'] s =", "new parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track the change in log-likelihood", "= [] dual_trace = [] converged = False cur_step = 0 D_full =", "+ (1. - c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d", "_lambda, u): '''Constraint function for the i'th value of u.''' return np.array([_lambda -", "locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns the", "= initial_values['beta'] # z = initial_values['z'] # u = initial_values['u'] # n =", "Return the results of the run return {'beta': beta, 'u': u, 'w': post_prob,", "+ 1 u = u0 cur_step = 0 while delta > converge and", "verbose: print('\\t\\tSolving via Graph Fused Lasso') # if initial_values is None: # beta", "/ (1. 
+ np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees", "bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace),", "_lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z = z_new", "_lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for", "#{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter", "'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus':", "diagonal matrix of weights. We use a tridiagonal representation # of K. Kd", "= np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = []", "_lambda # step-size parameter # Initialize primal and dual variables from warm start", "[] results_trace = [] best_idx = None best_plateaus = None flat_data = data.flatten()", "weights = (prior_prob * (1 - prior_prob)) y = beta - (prior_prob -", "elif dual_resnorm > 5 * primal_resnorm: a /= inflate u_dual *= inflate t_dual", "in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1]", "dual_resnorm else 0.5 # Recalculate the x_denominator since we changed the step-size #", "method of multipliers.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers')", "prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda,", "primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if primal_resnorm", "lasso library ''' # graphfl_lib = 
cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype", "print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose:", "* dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) # Calculate BIC", "check_map[np.isnan(beta)] = True plateaus = [] if verbose: print('\\tCalculating plateaus...') if verbose >", "1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max() > 20: #", "cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom')", "> 1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta", "dual_resnorm: a *= inflate u_dual /= inflate t_dual /= inflate elif dual_resnorm >", "to check local_check = [] # Generic graph case if edges is not", "convergence primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] + [i for i in", "mean and stdev. ''' def __init__(self, mean, stdev): self.mean = mean self.stdev =", "if grid_map is None else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights)", "def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for u using sequential least", "converge + 1 prev_objective = _u_objective_func(u, x, A) cur_step = 0 while delta", "Exception('Degrees of freedom calculation does not currently support more than 2 dimensions unless", "x_denominator since we changed the step-size # TODO: is this worth it? We're", "# Generic graph case if edges is not None: local_check.extend(edges[idx]) # 1d case", "def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001,", "(1. 
+ np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of", "else: beta = initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll = 0", "a * (z - u_dual)) / (weights + a) x_accel = alpha *", "0 while delta > converge and cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step))", "_lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values)", "= Dx_hat - z # Update u u = (u + a *", "reaching an unchecked one. if check_map[idx]: break # Create the plateau and calculate", "penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta =", "= np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if verbose: print('\\tCalculating plateaus...')", "dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter based on", "the x_denominator since we changed the step-size # TODO: is this worth it?", "for u using alternating direction method of multipliers.''' if verbose: print('\\t\\tSolving u via", "beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1],", "idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) # down if idx[1] < beta.shape[1]", "(dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) # Calculate BIC = -2ln(L) +", "not converged and cur_step < max_steps: # Update x x = (weights *", "parameter # Set up system involving graph Laplacian L = D.T.dot(D) W_over_a =", "1.) 
# Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i]", "# Check the index's unchecked neighbors for local_idx in local_check: if not check_map[local_idx]", "has been checked while to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1))", "== 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment", "and cur_step < max_steps: # Update x out = _1d_fused_lasso_crossprod(a*z - u) x", "tridiagonal_solve(Kl, Ku, Kd, W * y + out) Dx = np.ediff1d(x) # Update", "dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results", "+ converge) if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: Objective:", "a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return", "and dual_solver != 'graph': # weights is a diagonal matrix, represented as a", "matrix.''' delta = converge + 1 if initial_values is None: beta = np.zeros(data.shape)", "u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter':", "admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max() > 20: # beta", "{0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x,", "dimensions unless edges are specified explicitly. 
({0} given)'.format(len(beta.shape))) # Check the index's unchecked", "r + (1 - alpha) * s # Projection to constraint set arg", "'admm': # Solve the dual via alternating direction methods of multipliers #u =", "* log_likelihood_trace[i] # Calculate AICc = AIC + 2k * (k+1) / (n", "= [] self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30,", "_lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta =", "out all the info from the previous run self.reset() # Fit to the", "index's unchecked neighbors for local_idx in local_check: if not check_map[local_idx] \\ and beta[local_idx]", "admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom') # Create a grid structure", "+ (1 - alpha) * s # Projection to constraint set arg =", "self.add_step(post_prob, beta, prior_prob, delta) # Increment the step counter cur_step += 1 #", "print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step", "if not check_map[local_idx] \\ and beta[local_idx] >= min_member \\ and beta[local_idx] <= max_member:", "def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm", "np.sum((y - x)**2) + 2 * dof return {'x': x, 'z': z, 'u':", "self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] #", "verbose > 1: print('\\t\\tForming dual...') x = np.sqrt(weights) * y A = (1.", "to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next", "2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg, local_lambda / a) r_accel =", "p: weights[idx if grid_map is None else grid_map[idx[0], idx[1]]] = w posteriors =", "went through all the indices without reaching an unchecked one. 
if check_map[idx]: break", "- 2. * log_likelihood_trace[i] # Calculate AICc = AIC + 2k * (k+1)", "print('\\t\\tSolving u via L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1]) # Create", "print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees", "left, right, up, and down elif len(beta.shape) == 2: if idx[0] > 0:", "flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double,", "-np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics (weights", "None best_plateaus = None flat_data = data.flatten() edges = penalties[3] if dual_solver ==", "* y + out) Dx = np.ediff1d(x) # Update z Dx_hat = alpha", "print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of the run", "= prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum()", "off while to_check and check_map[idx]: try: idx = to_check.popleft() except: break # Edge", "Taylor-series expansion about the current iterate and coordinate descent to optimize Beta. 
'''", "(1 - alpha) * s # Projection to constraint set arg = x_accel", "else 0.5 # Recalculate the x_denominator since we changed the step-size # TODO:", "case.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers (1-D fused", "print('\\t\\tTaylor approximation...') # Cache the exponentiated beta exp_beta = np.exp(beta) # Form the", "converge + 1 u = u0 cur_step = 0 while delta > converge", "ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double,", "we went through all the indices without reaching an unchecked one. if check_map[idx]:", "converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs", "'''Solve for u using alternating direction method of multipliers with a cached LU", "0 D_full = D while not converged and cur_step < max_steps: # Update", "z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a * (z_new - z)", "+ L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal and dual variables", "weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z, u) #", "cur_plateau = set([idx]) cur_unchecked = deque([idx]) val = beta[idx] min_member = val -", "statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data)", "u0=u) elif dual_solver == 'admm': # Solve the dual via alternating direction methods", "penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False,", "a) #x_denominator = sparse.linalg.inv(W_over_a + L) # Update the 
step counter cur_step +=", "known mean and stdev. ''' def __init__(self, mean, stdev): self.mean = mean self.stdev", "self.c_iters = [] self.delta_iters = [] # ''' Load the graph fused lasso", "pop it off while to_check and check_map[idx]: try: idx = to_check.popleft() except: break", "u_dual *= inflate t_dual *= inflate # Update the step counter cur_step +=", "diagonal cur_step += 1 if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep", "[] u_trace = [] w_trace = [] c_trace = [] results_trace = []", "method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations:", "np.sqrt((np.array([i for i in primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm =", "20: # beta = np.clip(beta, -20, 20) # u = None else: raise", "the plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check local_check =", "plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check local_check = []", "solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0] n", "_lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual **", "while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check local_check = [] #", "D.shape[0] a = inflate * _lambda # step-size parameter # Initialize primal and", "< beta.shape[0] - 1: local_check.append(idx[0] + 1) # right # 2d case --", "{2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z': z, 's': s, 'u_dual':", "current iterate and coordinate descent to optimize Beta. 
''' prev_nll = self._m_log_likelihood(post_prob, beta)", "- c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on", "- dof_trace[i] - 1.) # Calculate BIC = -2ln(L) + k * (ln(n)", "= i best_plateaus = plateaus # Save the final run parameters to use", "1.0 / a * weights * y + D.T.dot(a * z - u)", "the previous run self.reset() # Fit to the final values results = self.run(flat_data,", "> 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph':", "u_dual)) / (weights + a) x_accel = alpha * x + (1 -", "self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'admm': # Solve", "c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double,", "plateaus = [] if verbose: print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices to", "+ exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta + beta - (1", "> converge and cur_step < max_steps: if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step))", "edges = penalties[3] if dual_solver == 'graph' else None if grid_data is not", "of Multipliers') n = len(y) m = D.shape[0] a = inflate * _lambda", "grid of beta values in linear time.''' to_check = deque(itertools.product(*[range(x) for x in", "cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns the list of", "exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta +", "K. 
Kd = np.array([a] + [2*a] * (n-2) + [a]) + W #", "else: # beta = initial_values['beta'] # z = initial_values['z'] # u = initial_values['u']", "= [0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] -", "cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge,", "n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate,", "Initialize primal and dual variables from warm start if initial_values is None: #", "# z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') # else: #", "import partial from collections import deque from pygfl.solver import TrailSolver class GaussianKnown: '''", "/ a, _lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z", "initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll = 0 cur_step = 0", "given our new parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track the change", "only works for the 1-D fused lasso case.''' if verbose: print('\\t\\tSolving u via", "change in the objective function value cur_objective = _u_objective_func(u, x, A) delta =", "= np.exp(beta) # Form the parameters for our weighted least squares if dual_solver", "grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights) weights = weights.flatten() return (weights,", "of a 1d or 2d grid of beta values in linear time.''' to_check", "= [] converged = False cur_step = 0 while not converged and cur_step", "/ (flat_data.shape[0] - dof_trace[i] - 1.) 
# Calculate BIC = -2ln(L) + k", "+ 2k * (k+1) / (n - k - 1) aicc_trace[i] = aic_trace[i]", "'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100,", "log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k - 2ln(L) aic_trace[i] =", "verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers (1-D fused lasso)') n", "trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u using", "np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda", "{1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter and update", "dual_solver == 'admm': # Solve the dual via alternating direction methods of multipliers", "Initialize primal and dual variables if initial_values is None: x = np.array([y.mean()] *", "x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) -", "every beta index has been checked while to_check: if verbose > 1: print('\\t\\tPlateau", "u) x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z Dx_relaxed =", "in u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False,", "= u0 if u0 is not None else np.zeros(A.shape[1]) l2_norm_A = (A *", "x = initial_values['x'] z = initial_values['z'] r = initial_values['r'] s = initial_values['s'] u_dual", "initial_values is None: # beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double')", "dual variable for constraint x = z t_dual = np.zeros(m) # scaled dual", "= np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a + L) # Update the step", "given)'.format(len(beta.shape))) # Check the 
index's unchecked neighbors for local_idx in local_check: if not", "print('\\t\\tSolving u via Alternating Direction Method of Multipliers') n = len(y) m =", "- (prior_prob - post_prob) / weights print(weights) print(y) if dual_solver == 'cd': #", "u0 if u0 is not None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0)", "while to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the", "exchange for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a =", "1)) # Get the next unchecked point on the grid idx = to_check.popleft()", "AIC = 2k - 2ln(L) aic_trace[i] = 2. * dof_trace[i] - 2. *", "for i in primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i", "+ csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x =", "0 while not converged and cur_step < max_steps: # Update x x_numerator =", "np.exp(beta)) u = initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c'] u =", "2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th value", "the step counter and update the previous objective value cur_step += 1 prev_objective", "sla from functools import partial from collections import deque from pygfl.solver import TrailSolver", "u u = u + a * primal_residual # Check convergence primal_resnorm =", "dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace,", "the cross-product D^T x, where D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0],", "- 2ln(L) aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i] # Calculate", "idx = to_check.popleft() except: break # Edge case -- If we went through", "''' A simple Gaussian distribution with known mean and stdev. 
''' def __init__(self,", "plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each of the plateaus.'''", "self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1),", "run self.reset() # Fit to the final values results = self.run(flat_data, penalties, _lambda=_lambda,", "entries Kl = np.array([-a] * (n-1)) # below the diagonal Ku = np.array([-a]", "/ a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual", "of the vector of betas if grid_map is not None: grid_points[grid_map != -1]", "= np.abs(cur_nll - prev_nll) / (prev_nll + converge) if verbose: print('\\tDelta: {0}'.format(delta)) #", "dual_solver != 'graph': # weights is a diagonal matrix, represented as a vector", "# Recalculate the x_denominator since we changed the step-size # TODO: is this", "if check_map[idx]: break # Create the plateau and calculate the inclusion conditions cur_plateau", "!= -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) # Count the", "= np.zeros(m) # penalty term s = np.zeros(m) # slack variable for penalty", "= np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged =", "check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta index has been checked", "functools import partial from collections import deque from pygfl.solver import TrailSolver class GaussianKnown:", "[DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return", "update the previous objective value cur_step += 1 prev_objective = cur_objective return u", "weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # 
Get", "Dx + (1 - alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat +", "- prev_nll) / (prev_nll + converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track the", "'graph': # weights is a diagonal matrix, represented as a vector for efficiency", "1) # right # 2d case -- check left, right, up, and down", "plateaus # Save the final run parameters to use for warm-starting the next", "'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100,", "_lambda, verbose > 1, u0=u) elif dual_solver == 'admm': # Solve the dual", "bounds = [(-_lambda, _lambda) for _ in u0] # Fit results = minimize(_u_objective_func,", "primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual **", "(A * A).sum(axis=0) r = x - A.dot(u) delta = converge + 1", "if initial_values is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1 +", "(prev_nll + converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta,", "z = np.zeros(n) # slack variable for likelihood r = np.zeros(m) # penalty", "Multipliers') n = len(y) m = D.shape[0] a = inflate * _lambda #", "dual_solver == 'graph' else None if grid_data is not None: grid_points = np.zeros(grid_data.shape)", "max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose:", "# DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob:", "residuals # This is the varying penalty extension to standard ADMM a *=", "= y - (1. 
/ weights) * penalties.T.dot(u) # Get the current log-likelihood", "Recalculate the x_denominator since we changed the step-size # TODO: is this worth", "grid points for this plateau if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]]", "of the points in a plateau.''' return minimize_scalar(plateau_loss_func, args=(data, signal_dist, null_dist), bounds=(0,1), method='Bounded').x", "1 if initial_values is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1", "# diagonal entries Kl = np.array([-a] * (n-1)) # below the diagonal Ku", "u0 is not None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r =", "initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u }", "dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1.", "* y + D.T.dot(a * z - u) x = np.linalg.solve(x_denominator, x_numerator) Dx", "cur_objective return u def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for u", "converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if primal_resnorm > 5", "aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace),", "= D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator = W_over_a + L #x_denominator", "cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective,", "args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None,", "vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob =", "cur_step = 0 while delta > converge and cur_step < max_steps: if verbose", "dof_trace = 
np.zeros(lambda_grid.shape) # The degrees of freedom of each final solution log_likelihood_trace", "z # Update u u = u + a * primal_residual # Check", "is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters", "box constraints bounds = [(-_lambda, _lambda) for _ in u0] # Fit results", "_lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties,", "n - 1 a = _lambda # The D matrix is the first-difference", "results_trace = [] best_idx = None best_plateaus = None flat_data = data.flatten() edges", "/ a) x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L) #", "z_new primal_residual = Dx_relaxed - z # Update u u = u +", "the naive DoF if verbose: print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i]", "primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update", "_lambda # step-size parameter # Set up system involving graph Laplacian L =", "value cur_step += 1 prev_objective = cur_objective return u def _u_slsqp(self, x, A,", "if verbose: print('Calculating degrees of freedom') # Create a grid structure out of", "= alpha * Dx + (1 - alpha) * z # Over-relaxation z_new", "n = len(f) x = [0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i]", "np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver", "penalty term s = np.zeros(m) # slack variable for penalty u_dual = np.zeros(n)", "np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters", "dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', 
admm_alpha=1.,", "+ a D^T D) # where W is the diagonal matrix of weights.", "print('Calculating degrees of freedom') # Create a grid structure out of the vector", "up system involving graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights / a)", "log_likelihood_trace[i] # Calculate AICc = AIC + 2k * (k+1) / (n -", "Method of Multipliers') n = len(y) m = D.shape[0] a = _lambda #", "= np.zeros(lambda_grid.shape) # The BIC score for each lambda value dof_trace = np.zeros(lambda_grid.shape)", "D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return", "beta = initial_values['beta'] # z = initial_values['z'] # u = initial_values['u'] # n", "m = n - 1 a = _lambda # The D matrix is", "weights[idx if grid_map is None else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data,", "verbose: print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC:", "print('\\t\\tForming dual...') x = np.sqrt(weights) * y A = (1. / np.sqrt(weights))[:,np.newaxis] *", "u = u0 if u0 is not None else np.zeros(A.shape[1]) l2_norm_A = (A", "> 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next unchecked point on", "print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx],", "= -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where", "dual variables from warm start if initial_values is None: # Graph Laplacian L", "beta = [0] n = len(f) x = [0] * n for i", "+ [a]) + W # diagonal entries Kl = np.array([-a] * (n-1)) #", "admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of the generalized lasso to", "in p: weights[idx if grid_map is None else grid_map[idx[0], idx[1]]] = w posteriors", "solution beta = y - (1. 
/ weights) * penalties.T.dot(u) # Get the", "penalty u_dual = np.zeros(n) # scaled dual variable for constraint x = z", "# Dual update primal_residual_x = x_accel - z primal_residual_r = r_accel - s", "z = z_new primal_residual = Dx_relaxed - z # Update u u =", "signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight", "Track the change in the objective function value cur_objective = _u_objective_func(u, x, A)", "# penalty term s = np.zeros(m) # slack variable for penalty u_dual =", "* n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] +", "grid idx = to_check.popleft() # If we already have checked this one, just", "self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data,", "- post_prob) / weights print(weights) print(y) if dual_solver == 'cd': # Solve the", "prior_prob = initial_values['c'] u = initial_values['u'] prev_nll = 0 cur_step = 0 while", "data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each of the plateaus.''' weights", "{3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic':", "penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph',", "stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev)", "'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of the", "'r': r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': 
primal_trace, 'dual_trace':", "cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the", "self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y,", "(1. / np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob * (1 - prior_prob))", "a = _lambda # The D matrix is the first-difference operator. K is", "= None flat_data = data.flatten() edges = penalties[3] if dual_solver == 'graph' else", "grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda =", "of multipliers.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers') n", "matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) -", "prior_prob = np.exp(beta) / (1 + np.exp(beta)) u = initial_values else: beta =", "0: local_check.append((idx[0], idx[1] - 1)) # down if idx[1] < beta.shape[1] - 1:", "self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose,", "verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver ==", "= converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values is not None:", "u) x = tridiagonal_solve(Kl, Ku, Kd, W * y + out) Dx =", "L) # Initialize primal and dual variables if initial_values is None: x =", "case -- check left, right, up, and down elif len(beta.shape) == 2: if", "verbose > 1, u0=u) elif dual_solver == 'lbfgs': # Solve the dual via", "the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1))", "import TrailSolver class GaussianKnown: ''' A simple Gaussian distribution with known mean and", "verbose > 1, 
initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose", "dual_solver == 'lbfgs': # Solve the dual via L-BFGS-B u = self._u_lbfgsb(x, A,", "The AIC score for each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc", "all of the points in a plateau.''' return minimize_scalar(plateau_loss_func, args=(data, signal_dist, null_dist), bounds=(0,1),", "= prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r +=", "primal_resnorm)) return {'x': x, 'r': r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual':", "library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype =", "if verbose: print('\\t\\tSolving u via Sequential Least Squares') if u0 is None: u0", "value of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u))", "if verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step counter cur_step", "/ (weights + a) x_accel = alpha * x + (1 - alpha)", "z = initial_values['z'] # u = initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n,", "weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight", "in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset of grid points", "except: break # Edge case -- If we went through all the indices", "Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha", "= np.zeros(n) # scaled dual variable for constraint x = z t_dual =", "cur_step < max_steps: if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') #", "number of free parameters in the grid (dof) plateaus = 
calc_plateaus(grid_points, dof_tolerance, edges=edges)", "grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) # Count", "def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function for a plateau.''' return", "args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction", "= np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') # else: # beta =", "beta) # Track the convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll +", "post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose,", "* penalties.T.dot(u) # Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track", "beta = u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0],", "+ c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] +", "on the grid idx = to_check.popleft() # If we already have checked this", "/ (1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta + beta", "!= -1]] else: grid_points = results['beta'].reshape(data.shape) # Count the number of free parameters", "stdev. ''' def __init__(self, mean, stdev): self.mean = mean self.stdev = stdev def", "Kd = np.array([a] + [2*a] * (n-2) + [a]) + W # diagonal", "best_plateaus = None flat_data = data.flatten() edges = penalties[3] if dual_solver == 'graph'", "self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if", "Back out beta from the dual solution beta = y - (1. 
/", "= penalties[3] if dual_solver == 'graph' else None if grid_data is not None:", "and check_map[idx]: try: idx = to_check.popleft() except: break # Edge case -- If", "# above the diagonal # Initialize primal and dual variables if initial_values is", "* x + (1 - alpha) * z # over-relaxation # Update constraint", "\\ and beta[local_idx] <= max_member: # Label this index as being checked so", "this plateau if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in", "+ beta[i+1] return np.array(x) def ilogit(x): return 1. / (1. + np.exp(-x)) def", "converge_threshold if primal_resnorm > 5 * dual_resnorm: a *= inflate u_dual /= inflate", "the next unchecked point on the grid idx = to_check.popleft() # If we", "of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau", "specified explicitly. ({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors for local_idx in", "alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails,", "= np.array([-a] * (n-1)) # above the diagonal cur_step += 1 if verbose", "prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge + 1 u = u0 cur_step", "with the given penalty matrix.''' delta = converge + 1 if initial_values is", "neighbors to check local_check = [] # Generic graph case if edges is", "the negative log-likelihood tracker prev_nll = cur_nll return beta, u def _m_log_likelihood(self, post_prob,", "= to_check.popleft() except: break # Edge case -- If we went through all", "objective function value cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective)", "If we went through all the indices without reaching an unchecked one. 
if", "and right elif len(beta.shape) == 1: if idx[0] > 0: local_check.append(idx[0] - 1)", "dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of the", "/ (prev_nll + converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob,", "a cached LU decomposition.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of", "= (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the naive DoF if verbose:", "self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient", "self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold,", "r = _soft_threshold(arg, local_lambda / a) r_accel = alpha * r + (1", "local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if adaptive else _lambda r =", "return -np.log(c * signal_dist.pdf(data) + (1. 
- c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist,", "weighted least squares if dual_solver != 'admm' and dual_solver != 'graph': # weights", "a diagonal matrix, represented as a vector for efficiency weights = 0.5 *", "- 1 a = _lambda # The D matrix is the first-difference operator.", "y + a * (z - u_dual)) / (weights + a) x_accel =", "jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where D is the", "z = initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace = [] converged", "_lambda, verbose, u0=None): '''Solve for u using sequential least squares.''' if verbose: print('\\t\\tSolving", "cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check local_check = [] # Generic", "# Back out beta from the dual solution beta = y - (1.", "Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta =", "previous run self.reset() # Fit to the final values results = self.run(flat_data, penalties,", "it's not re-checked unnecessarily check_map[local_idx] = True # Add it to the plateau", "results # Save the trace of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w'])", "penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) #", "z) dual_residual_t = a * (s_new - s) z = z_new s =", "self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find beta using an alternating Taylor approximation", "'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps':", "1 # Use the naive DoF if verbose: print('Calculating AIC') # Get the", "a = inflate * _lambda # step-size parameter # Initialize primal and dual", "the matrix (W + a D^T D) # where W is the 
diagonal", "previous objective value cur_step += 1 prev_objective = cur_objective return u def _u_slsqp(self,", "= True # Add it to the plateau and the list of local", "in log-likelihood to see if we've converged delta = np.abs(cur_nll - prev_nll) /", "z_new = _soft_threshold(Dx_hat + u / a, _lambda / a) dual_residual = a", "with a cached LU decomposition.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method", "* null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on all of", "self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters", "(flat_data.shape[0] - dof_trace[i] - 1.) # Calculate BIC = -2ln(L) + k *", "setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx],", "def reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters =", "'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best':", "A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for u using coordinate descent.''' if", "for likelihood r = np.zeros(m) # penalty term s = np.zeros(m) # slack", "cd_converge, cd_max_steps, verbose > 1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge,", "at a time. 
for coord in range(len(u)): prev_u = u[coord] next_u = prev_u", "each plateau's indices plateaus.append((val, cur_plateau)) # Returns the list of plateaus and their", "dual_solver == 'admm': # Get the negative log-likelihood of the data given our", "primal and dual residuals # This is the varying penalty extension to standard", "[] # ''' Load the graph fused lasso library ''' # graphfl_lib =", "(1 + exp_beta) if verbose > 1: print('\\t\\tForming dual...') x = np.sqrt(weights) *", "print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if", "using alternating direction method of multipliers.''' if verbose: print('\\t\\tSolving u via Alternating Direction", "-- check left and right elif len(beta.shape) == 1: if idx[0] > 0:", "A, _lambda, verbose > 1, u0=u) elif dual_solver == 'admm': # Solve the", "(1. / weights) * penalties.T.dot(u) # Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob,", "of Multipliers') n = len(y) m = D.shape[0] a = _lambda # step-size", "converged = False cur_step = 0 D_full = D while not converged and", "np.zeros(m) # scaled dual variable for constraint r = s else: lu_factor =", "beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose,", "# Edge case -- If we went through all the indices without reaching", "_u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u", "np.array([-a] * (n-1)) # below the diagonal Ku = np.array([-a] * (n-1)) #", "= _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd, W * y +", "u['beta'] # if np.abs(beta).max() > 20: # beta = np.clip(beta, -20, 20) #", "local_idx in local_check: if not check_map[local_idx] \\ and beta[local_idx] >= min_member \\ and", "based 
on norm of primal and dual residuals a *= 2 if primal_resnorm", "1 # Update the negative log-likelihood tracker prev_nll = cur_nll return beta, u", "_lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get", "direction method of multipliers. Note that this method only works for the 1-D", "* (k+1) / (n - k - 1) aicc_trace[i] = aic_trace[i] + 2", "'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w':", "# Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC =", "np.array([a] + [2*a] * (n-2) + [a]) + W # diagonal entries Kl", "{1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver !=", "u_dual + primal_residual_x t_dual = t_dual + primal_residual_r # Check convergence primal_resnorm =", "conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val = beta[idx] min_member = val", "right if idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) # down if idx[1]", "s # Projection to constraint set arg = x_accel + u_dual + D.T.dot(r_accel", "1-d regression on all of the points in a plateau.''' return minimize_scalar(plateau_loss_func, args=(data,", "u = initial_values['u'] primal_trace = [] dual_trace = [] converged = False cur_step", "np.sqrt((np.array([i for i in dual_residual_u] + [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm)", "to see if we've converged delta = np.abs(cur_nll - prev_nll) / (prev_nll +", "- a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1]", "dtype='double') # 
u = np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta'] #", "== 'cd': # Solve the dual via coordinate descent u = self._u_coord_descent(x, A,", "[] dual_trace = [] converged = False cur_step = 0 D_full = D", "(z - u_dual)) / (weights + a) x_accel = alpha * x +", "iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated beta exp_beta = np.exp(beta) #", "cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver", "Gaussian distribution with known mean and stdev. ''' def __init__(self, mean, stdev): self.mean", "best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001,", "= [] converged = False cur_step = 0 D_full = D while not", "else: x = initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace = []", "[{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver", "plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each of the", "sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1))", "norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object):", "Get the likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...')", "else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r = x - A.dot(u) delta", "initial_values=None, inflate=2., adaptive=False): '''Solve for u using 
alternating direction method of multipliers with", "if primal_resnorm > 10 * dual_resnorm else 0.5 # Recalculate the x_denominator since", "results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each", "primal_residual = Dx_relaxed - z # Update u u = u + a", "elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3],", "inflate t_dual /= inflate elif dual_resnorm > 5 * primal_resnorm: a /= inflate", "x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx", "(prior_prob - post_prob) / weights print(weights) print(y) if dual_solver == 'cd': # Solve", "works for the 1-D fused lasso case.''' if verbose: print('\\t\\tSolving u via Alternating", "val = beta[idx] min_member = val - rel_tol max_member = val + rel_tol", "= penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters =", "breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z, u) # return {'beta': beta,", "as being checked so it's not re-checked unnecessarily check_map[local_idx] = True # Add", "initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u']", "{0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver != 'graph': # Back out beta", "D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using alternating", "#{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the likelihood weights vector (E-step) post_prob =", "term r arg = s - t_dual local_lambda = (_lambda - np.abs(arg) /", "in primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i", "out) Dx = np.ediff1d(x) # Update z Dx_hat = alpha * Dx +", "flags='C_CONTIGUOUS'), # c_double, 
c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double,", "+ 1 if initial_values is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) /", "# if np.abs(beta).max() > 20: # beta = np.clip(beta, -20, 20) # u", "initial_values=None): '''Runs the Expectation-Maximization algorithm for the data with the given penalty matrix.'''", "- s u_dual = u_dual + primal_residual_x t_dual = t_dual + primal_residual_r #", "(weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob", "Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha =", "prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta))", "weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve", "results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) # Count the number of free", "freedom calculation does not currently support more than 2 dimensions unless edges are", "- a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i", "inflate=2., adaptive=False): '''Solve for u using alternating direction method of multipliers with a", "not currently support more than 2 dimensions unless edges are specified explicitly. 
({0}", "* Dx + (1 - alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat", "weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta", "checked so it's not re-checked unnecessarily check_map[local_idx] = True # Add it to", "c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver()", "= u0 cur_step = 0 while delta > converge and cur_step < max_steps:", "# Set up system involving graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights", "''' def __init__(self, mean, stdev): self.mean = mean self.stdev = stdev def pdf(self,", "verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated beta", "one, just pop it off while to_check and check_map[idx]: try: idx = to_check.popleft()", "log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the best", "np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters", "None else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights) weights = weights.flatten()", "jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x,", "'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None):", "for warm-starting the next iteration initial_values = results # Save the trace of", "min_member \\ and beta[local_idx] <= max_member: # Label this index as being checked", "# Track the change in log-likelihood to see if we've converged delta =", "* y + a * (z - u_dual)) / (weights + a) x_accel", "< max_steps: # Update x out = 
_1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl,", "the log-likelihood of the betas given the weights and data.''' return (np.log(1 +", "next_u)) r += A.T[coord] * prev_u - A.T[coord] * u[coord] # Track the", "= data.flatten() edges = penalties[3] if dual_solver == 'graph' else None if grid_data", "min_member = val - rel_tol max_member = val + rel_tol # Check every", ">= dof_tolerance).sum() + 1 # Use the naive DoF if verbose: print('Calculating AIC')", "np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta']", "initial_values=None): '''Solve for u using alternating direction method of multipliers. Note that this", "1: print('\\t\\tForming dual...') x = np.sqrt(weights) * y A = (1. / np.sqrt(weights))[:,np.newaxis]", "primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] + [i for i in", "Update x x_numerator = 1.0 / a * weights * y + D.T.dot(a", "'''Efficiently compute the cross-product D^T x, where D is the first-differences matrix.''' return", "penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): '''", "Dx_relaxed = alpha * Dx + (1 - alpha) * z # over-relax", "def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on all of the points", "def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph',", "np.zeros(m) # penalty term s = np.zeros(m) # slack variable for penalty u_dual", "Calculate AIC = 2k - 2ln(L) aic_trace[i] = 2. 
* dof_trace[i] - 2.", "verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step,", "is None: # Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the", "# 1d case -- check left and right elif len(beta.shape) == 1: if", "m = D.shape[0] a = _lambda # step-size parameter # Set up system", "if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) # up #", "primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. AIC = np.sum((y - x)**2)", "variable for constraint r = s else: lu_factor = initial_values['lu_factor'] x = initial_values['x']", "sequential least squares.''' if verbose: print('\\t\\tSolving u via Sequential Least Squares') if u0", "[c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double,", "involving graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator =", "= initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u", "else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver != 'graph':", "= self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive,", "x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z Dx_relaxed = alpha", "weights is a diagonal matrix, represented as a vector for efficiency weights =", "= mean self.stdev = stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def", "np.abs(cur_nll - prev_nll) / (prev_nll + converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track", "# Solve the dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose >", "x = (weights * y + a * (z - 
u_dual)) / (weights", "u using alternating direction method of multipliers.''' if verbose: print('\\t\\tSolving u via Alternating", "idx[1] + 1)) # up # Only supports 1d and 2d cases for", "< converge_threshold if primal_resnorm > 5 * dual_resnorm: a *= inflate u_dual /=", "_lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver == 'sls': # Solve", "'''Solve for u using coordinate descent.''' if verbose: print('\\t\\tSolving u via Coordinate Descent')", "L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif dual_solver ==", "calculation does not currently support more than 2 dimensions unless edges are specified", "not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape)", "self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters)", "size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a) #x_denominator =", "0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx >", "dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. 
AIC = np.sum((y -", "[] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20,", "initial_values=None): '''Solve for u using a super fast graph fused lasso library that", "likelihood r = np.zeros(m) # penalty term s = np.zeros(m) # slack variable", "+= A.T[coord] * prev_u - A.T[coord] * u[coord] # Track the change in", "'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0,", "= sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a +", "delta = converge + 1 prev_objective = _u_objective_func(u, x, A) cur_step = 0", "z #primal_residual = Dx_hat - z # Update u u = (u +", "check_map[local_idx] \\ and beta[local_idx] >= min_member \\ and beta[local_idx] <= max_member: # Label", "u / a, _lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z)", "# The degrees of freedom of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace", "plateau's indices plateaus.append((val, cur_plateau)) # Returns the list of plateaus and their values", "{'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate", "= np.zeros(lambda_grid.shape) # The AIC score for each lambda value aicc_trace = np.zeros(lambda_grid.shape)", "= np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z = initial_values['z'] u", "\\ and beta[local_idx] >= min_member \\ and beta[local_idx] <= max_member: # Label this", "self.solver = TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta)", "Second-order Taylor-series expansion about the current iterate and coordinate descent to optimize Beta.", "dual_residual_u] + [i for i in 
dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm <", "diagonal # Initialize primal and dual variables if initial_values is None: x =", "_lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out", "None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta)) u =", "post_prob / exp_beta + beta - (1 + exp_beta) if verbose > 1:", "# left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1) # right", "of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0}", "max_steps: # Update x x_numerator = 1.0 / a * weights * y", "admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the data with the given penalty", "(prior_prob * (1 - prior_prob)) y = beta - (prior_prob - post_prob) /", "while not converged and cur_step < max_steps: # Update x x = (weights", "tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0] n = len(f) x = [0]", "self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def", "admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of the generalized", "print('\\t\\tSolving u via Sequential Least Squares') if u0 is None: u0 = np.zeros(A.shape[1])", "the data given our new parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track", "fast graph fused lasso library that has an optimized ADMM routine.''' if verbose:", "to optimize Beta. 
''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge + 1", "via Alternating Direction Method of Multipliers') n = len(y) m = D.shape[0] a", "for x in beta.shape])) if edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape,", "# Get the next unchecked point on the grid idx = to_check.popleft() #", "+ 1 # Use the naive DoF if verbose: print('Calculating AIC') # Get", "x_accel = alpha * x + (1 - alpha) * z # over-relaxation", "unchecked point on the grid idx = to_check.popleft() # If we already have", "null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight", "rel_tol # Check every possible boundary of the plateau while cur_unchecked: idx =", "Least Squares') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box", "dual_trace = [] converged = False cur_step = 0 while not converged and", "log-likelihood function for a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. - c)", "Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor", "the grid idx = to_check.popleft() # If we already have checked this one,", "efficiency weights = 0.5 * exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2", "= s else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r", "u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the betas given the", "initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda,", "Beta. 
''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge + 1 u =", "y = (1+exp_beta)**2 * post_prob / exp_beta + beta - (1 + exp_beta)", "else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if", "cur_step += 1 # Update the negative log-likelihood tracker prev_nll = cur_nll #", "a * (s_new - s) z = z_new s = s_new # Dual", "Alternating Second-order Taylor-series expansion about the current iterate and coordinate descent to optimize", "Multipliers (1-D fused lasso)') n = len(y) m = n - 1 a", "raise Exception('Degrees of freedom calculation does not currently support more than 2 dimensions", "= 0 while delta > converge and cur_step < max_steps: if verbose: print('Step", "x, where D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x,", "= np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace = [] c_trace =", "since we changed the step-size # TODO: is this worth it? 
We're paying", "1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] -", "dual via alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge,", "Direction Method of Multipliers') n = len(y) m = D.shape[0] a = _lambda", "_data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of the data given the weights.'''", "D^T x, where D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def", "{'beta': beta, 'z': z, 'u': u } self.solver.alpha = alpha self.solver.inflate = inflate", "= _soft_threshold(Dx_relaxed + u / a, _lambda / a) dual_residual = a *", "self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values is not None: self.solver.beta =", "if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD", "_lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via", "if verbose: print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices to check {0} {1}'.format(len(to_check),", "z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') # else: # beta", "inflate, max_steps, converge, beta, z, u) # return {'beta': beta, 'z': z, 'u':", "coordinate one at a time. for coord in range(len(u)): prev_u = u[coord] next_u", "Track the convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge) if", "x + (1 - alpha) * z # over-relaxation # Update constraint term", "where W is the diagonal matrix of weights. 
We use a tridiagonal representation", "left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1])) # right", "breakpoints, weights=weights) if initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z']", "D.T.dot(z_new - z) z = z_new primal_residual = Dx_relaxed - z # Update", "all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC:", "{0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best", "z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold,", "data, signal_dist, null_dist): '''The negative log-likelihood function for a plateau.''' return -np.log(c *", "* (ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data))", "max_member: # Label this index as being checked so it's not re-checked unnecessarily", "our new parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track the change in", "np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z = initial_values['z'] u =", "= alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x): return 1. / (1. +", "= (1. / np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob * (1 -", "rel_tol max_member = val + rel_tol # Check every possible boundary of the", "W * y + out) Dx = np.ediff1d(x) # Update z Dx_hat =", "'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100,", "np.nan for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda))", "of K. 
Kd = np.array([a] + [2*a] * (n-2) + [a]) + W", "u[coord] # Track the change in the objective function value cur_objective = _u_objective_func(u,", "W_over_a = np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a + L) # Update the", "np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return", "+ np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom)", "inflate elif dual_resnorm > 5 * primal_resnorm: a /= inflate u_dual *= inflate", "_u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function", "and coordinate descent to optimize Beta. ''' prev_nll = self._m_log_likelihood(post_prob, beta) delta =", "first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x)", "len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the naive DoF", "values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose,", "Direction Method of Multipliers') n = len(y) m = D.shape[0] a = inflate", "## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f):", "verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step counter cur_step +=", "the varying penalty extension to standard ADMM a *= 2 if primal_resnorm >", "* dof_trace[i] - 2. 
* log_likelihood_trace[i] # Calculate AICc = AIC + 2k", "graph fused lasso library that has an optimized ADMM routine.''' if verbose: print('\\t\\tSolving", "plateaus...') if verbose > 1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop", "AIC + 2k * (k+1) / (n - k - 1) aicc_trace[i] =", "= np.array([a] + [2*a] * (n-2) + [a]) + W # diagonal entries", "= D.shape[0] a = _lambda # step-size parameter # Set up system involving", "results['c']) # Calculate AIC = 2k - 2ln(L) aic_trace[i] = 2. * dof_trace[i]", "- z # Update u u = u + a * primal_residual #", "if idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) # down if idx[1] <", "a) dual_residual = a * D.T.dot(z_new - z) z = z_new primal_residual =", "= a * (s_new - s) z = z_new s = s_new #", "= u['beta'] # if np.abs(beta).max() > 20: # beta = np.clip(beta, -20, 20)", "edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom) of a 1d or 2d", "None: local_check.extend(edges[idx]) # 1d case -- check left and right elif len(beta.shape) ==", "None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver !=", "up # Only supports 1d and 2d cases for now else: raise Exception('Degrees", "m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm", "norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as sla", "+ [i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u]", "the plateau and calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx])", "% 100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta))", "None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) 
check_map[np.isnan(beta)] = True plateaus = []", "# Update the negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING if verbose:", "grid_map is None else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights) weights", "the plateau and the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track", "below the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal #", "(M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps,", "and cur_step < max_steps: if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...')", "_1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd, W * y + out)", "for u using sequential least squares.''' if verbose: print('\\t\\tSolving u via Sequential Least", "k - 1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) /", "DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f},", "primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in", "# The BIC score for each lambda value dof_trace = np.zeros(lambda_grid.shape) # The", "# if initial_values is None: # beta = np.zeros(y.shape, dtype='double') # z =", "> 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated beta exp_beta", "(1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta + beta -", "post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find beta using an alternating", "def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False):", "+ L) # Initialize primal and dual variables if 
initial_values is None: x", "def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for", "Create the plateau and calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked =", "= dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter based", "TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self):", "2k - 2ln(L) aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i] #", "-- check left, right, up, and down elif len(beta.shape) == 2: if idx[0]", "update primal_residual_x = x_accel - z primal_residual_r = r_accel - s u_dual =", "u0=u) elif dual_solver == 'sls': # Solve the dual via sequential least squares", "AICc = AIC + 2k * (k+1) / (n - k - 1)", "if initial_values is None: # Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) #", "method of multipliers. Note that this method only works for the 1-D fused", "= alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges,", "alpha) * z # over-relax Dx z_new = _soft_threshold(Dx_relaxed + u / a,", "alternating direction method of multipliers. 
Note that this method only works for the", "beta values in linear time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if", "= [] u_trace = [] w_trace = [] c_trace = [] results_trace =", "verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max() >", "and primal_resnorm < converge_threshold if primal_resnorm > 5 * dual_resnorm: a *= inflate", "(u + a * primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual **", "data given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) *", "+ a * (z - u_dual)) / (weights + a) x_accel = alpha", "np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self):", "i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm <", "unnecessarily check_map[local_idx] = True # Add it to the plateau and the list", "= np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm", "s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace", "Returns the list of plateaus and their values return plateaus def plateau_loss_func(c, data,", "* (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) # Calculate BIC = -2ln(L)", "are specified explicitly. 
({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors for local_idx", "_lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda,", "* (n-1)) # above the diagonal # Initialize primal and dual variables if", "= initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace =", "max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u using a super fast graph", "u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] =", "[0] beta = [0] n = len(f) x = [0] * n for", "dual residuals a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5", "if idx[0] > 0: local_check.append(idx[0] - 1) # left if idx[0] < beta.shape[0]", "Kd, W * y + out) Dx = np.ediff1d(x) # Update z Dx_hat", "self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot", "s - t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if adaptive else", "cases for now else: raise Exception('Degrees of freedom calculation does not currently support", "alternating direction method of multipliers with a cached LU decomposition.''' if verbose: print('\\t\\tSolving", "+ out) Dx = np.ediff1d(x) # Update z Dx_hat = alpha * Dx", "primal and dual variables if initial_values is None: x = np.array([y.mean()] * n)", "a *= inflate u_dual /= inflate t_dual /= inflate elif dual_resnorm > 5", "None if grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for", "Cache the exponentiated beta exp_beta = np.exp(beta) # Form the parameters for our", "- 1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0]", "weights = 0.5 * exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2 *", "x, A) delta = np.abs(prev_objective - cur_objective) / (prev_objective + converge) if verbose", 
"np.array(x) def ilogit(x): return 1. / (1. + np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None,", "TODO: is this worth it? We're paying a matrix inverse in exchange for", "= self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive,", "cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for", "idx[1] - 1)) # down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1]", "-1]] else: grid_points = results['beta'].reshape(data.shape) # Count the number of free parameters in", "we already have checked this one, just pop it off while to_check and", "np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T", "ADMM a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 #", "AICc score for each lambda value (correcting for finite sample size) bic_trace =", "the negative log-likelihood of the data given the weights.''' signal_weight = prior_prob *", "alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using alternating direction method of multipliers", "if verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i]))", "def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for u using", "u = initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u']", "converge + 1 if initial_values is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta)", "ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'),", "np.zeros(lambda_grid.shape) # The degrees of freedom of 
each final solution log_likelihood_trace = np.zeros(lambda_grid.shape)", "= max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values", "Update x x = (weights * y + a * (z - u_dual))", "np.abs(arg) / 2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg, local_lambda / a)", "using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B') if u0 is None: u0", "self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails,", "vector of betas if grid_map is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map", "self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters", "for coord in range(len(u)): prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r) /", "alpha * Dx + (1 - alpha) * z # Over-relaxation z_new =", "z = np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z = initial_values['z']", "dual_solver != 'admm' and dual_solver != 'graph': # Back out beta from the", "print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None):", "and cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get", "is a diagonal matrix, represented as a vector for efficiency weights = 0.5", "beta, z, u) # return {'beta': beta, 'z': z, 'u': u } self.solver.alpha", "dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps,", "x = [0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i]", "initial_values['u'] prev_nll = 0 cur_step = 0 while delta > converge and cur_step", "+ D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new 
= D.dot(z_new) dual_residual_u = a", "an alternating Taylor approximation and convex optimization (M-step) beta, u = self._m_step(beta, prior_prob,", "# Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean())", "if dual_solver != 'admm' and dual_solver != 'graph': # weights is a diagonal", "Squares') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box constraints", "algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0]", "plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) #", "in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x): return 1.", "(z_new - z) dual_residual_t = a * (s_new - s) z = z_new", "calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val = beta[idx]", "dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if primal_resnorm > 5 * dual_resnorm:", "[] best_idx = None best_plateaus = None flat_data = data.flatten() edges = penalties[3]", "primal_residual_x t_dual = t_dual + primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for", "(n-1)) # below the diagonal Ku = np.array([-a] * (n-1)) # above the", "> 1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose >", "if dual_solver == 'admm': # Get the negative log-likelihood of the data given", "/ (signal_weight + null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda,", "< bic_trace[best_idx]: best_idx = i best_plateaus = plateaus # Save the final run", "= cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of", "if verbose: print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3}", "if 
verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the likelihood weights vector", "boundary of the plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check", "for u using alternating direction method of multipliers with a cached LU decomposition.'''", "and beta[local_idx] >= min_member \\ and beta[local_idx] <= max_member: # Label this index", "prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get", "if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next unchecked", "({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors for local_idx in local_check: if", "(_lambda - np.abs(arg) / 2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg, local_lambda", "A.dot(u) delta = converge + 1 prev_objective = _u_objective_func(u, x, A) cur_step =", "converge and cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') #", "from collections import deque from pygfl.solver import TrailSolver class GaussianKnown: ''' A simple", "u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u =", "if edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True", "np.array([-a] * (n-1)) # above the diagonal # Initialize primal and dual variables", "right, up, and down elif len(beta.shape) == 2: if idx[0] > 0: local_check.append((idx[0]", "converge_threshold) + 1. 
AIC = np.sum((y - x)**2) + 2 * dof return", "n = len(y) m = D.shape[0] a = _lambda # step-size parameter #", "< converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter based on norm", "u_trace = [] w_trace = [] c_trace = [] results_trace = [] best_idx", "# Update u u = (u + a * primal_residual).clip(-_lambda, _lambda) # Check", "max(-_lambda, next_u)) r += A.T[coord] * prev_u - A.T[coord] * u[coord] # Track", "+ 1)) # up # Only supports 1d and 2d cases for now", "squares.''' if verbose: print('\\t\\tSolving u via Sequential Least Squares') if u0 is None:", "'''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight", "A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the", "= weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2", "np.array([y.mean()] * n) # likelihood term z = np.zeros(n) # slack variable for", "* u[coord] # Track the change in the objective function value cur_objective =", "weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u", "parameters to use for warm-starting the next iteration initial_values = results # Save", "= 0 while delta > converge and cur_step < max_steps: # Update each", "* self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self,", "beta = y - (1. 
/ weights) * penalties.T.dot(u) # Get the current", "admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom') # Create a grid", "verbose, u0=None): '''Solve for u using coordinate descent.''' if verbose: print('\\t\\tSolving u via", "beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) # up # Only supports 1d", "# Track the convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge)", "re-checked unnecessarily check_map[local_idx] = True # Add it to the plateau and the", "super fast graph fused lasso library that has an optimized ADMM routine.''' if", "#x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal and dual variables if initial_values", "= np.abs(prev_objective - cur_objective) / (prev_objective + converge) if verbose and cur_step %", "breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u using a", "the list of plateaus and their values return plateaus def plateau_loss_func(c, data, signal_dist,", "-1] = results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) # Count the number", "= (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate", "= _soft_threshold(Dx_hat + u / a, _lambda / a) dual_residual = a *", "self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight) return post_prob def _m_step(self, beta,", "# Update x x_numerator = 1.0 / a * weights * y +", "the betas given the weights and data.''' return (np.log(1 + np.exp(beta)) - post_prob", "[{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve", "exponentiated beta exp_beta = np.exp(beta) # Form the parameters for our weighted least", "while not converged and cur_step < max_steps: # Update x x_numerator = 1.0", 
"else: plateau_data = np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist)", "/ a, _lambda / a) dual_residual = a * D.T.dot(z_new - z) z", "start if initial_values is None: # Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n)))", "[] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] # ''' Load", "the best model thus far if best_idx is None or bic_trace[i] < bic_trace[best_idx]:", "dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 #", "grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data", "{1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. AIC", "best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0,", "'''Solve for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B') if u0", "complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob)", "print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x", "Update u u = u + a * primal_residual # Check convergence primal_resnorm", "Find beta using an alternating Taylor approximation and convex optimization (M-step) beta, u", "i in dual_residual_u] + [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged =", "a)) W_over_a = np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a + L) # Update", "csc_matrix, linalg as sla from functools 
import partial from collections import deque from", "Track the step self.add_step(post_prob, beta, prior_prob, delta) # Increment the step counter cur_step", "CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter and update the", "AIC = np.sum((y - x)**2) + 2 * dof return {'x': x, 'z':", "z = z_new s = s_new # Dual update primal_residual_x = x_accel -", "np.exp(beta) # Form the parameters for our weighted least squares if dual_solver !=", "enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out all the", "= initial_values['x'] z = initial_values['z'] r = initial_values['r'] s = initial_values['s'] u_dual =", "'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps,", "not check_map[local_idx] \\ and beta[local_idx] >= min_member \\ and beta[local_idx] <= max_member: #", "dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] + [i for i in dual_residual_t])**2).mean())", "The D matrix is the first-difference operator. 
K is the matrix (W +", "= np.ediff1d(x) # Update z Dx_hat = alpha * Dx + (1 -", "__init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x is", "- 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max() > 20:", "checked while to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get", "for u using coordinate descent.''' if verbose: print('\\t\\tSolving u via Coordinate Descent') u", "= None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver", "np.zeros(lambda_grid.shape) # The AIC score for each lambda value aicc_trace = np.zeros(lambda_grid.shape) #", "log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence delta = np.abs(prev_nll -", "'''Solve for u using a super fast graph fused lasso library that has", "dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update step-size", "# right if idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) # down if", "Update the negative log-likelihood tracker prev_nll = cur_nll return beta, u def _m_log_likelihood(self,", "Dual update primal_residual_x = x_accel - z primal_residual_r = r_accel - s u_dual", "not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data = np.array([data[x,y]", "+ beta - (1 + exp_beta) if verbose > 1: print('\\t\\tForming dual...') x", "options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d", "a grid structure out of the vector of betas if grid_map is not", "the trace of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose:", "beta.shape[0] - 1: local_check.append((idx[0] + 1, 
idx[1])) # right if idx[1] > 0:", "= alpha * r + (1 - alpha) * s # Projection to", "s else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r =", "s_new # Dual update primal_residual_x = x_accel - z primal_residual_r = r_accel -", "def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.''' if", "u via L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our", "Expectation-Maximization algorithm for the data with the given penalty matrix.''' delta = converge", "* exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta", "0 while delta > converge and cur_step < max_steps: # Update each coordinate", "Track the change in log-likelihood to see if we've converged delta = np.abs(cur_nll", "= initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z':", "/ (prev_objective + converge) if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep", "s = s_new # Dual update primal_residual_x = x_accel - z primal_residual_r =", "print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate", "log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace = [] c_trace", "squares u = self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif dual_solver ==", "# Over-relaxation z_new = _soft_threshold(Dx_hat + u / a, _lambda / a) dual_residual", "worth it? 
We're paying a matrix inverse in exchange for varying the step", "converge, max_steps, verbose, u0=None): '''Solve for u using coordinate descent.''' if verbose: print('\\t\\tSolving", "5 * primal_resnorm: a /= inflate u_dual *= inflate t_dual *= inflate #", "dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge,", "1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta index", "first-difference operator. K is the matrix (W + a D^T D) # where", "via Sequential Least Squares') if u0 is None: u0 = np.zeros(A.shape[1]) # Create", "x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th value of u.'''", "y, weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None):", "def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where D is the first-differences", "= a * (z_new - z) dual_residual_t = a * (s_new - s)", "primal_residual = Dx - z #primal_residual = Dx_hat - z # Update u", "# Save the trace of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c'])", "* log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the", "= (u + a * primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual", "# Get the likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose:", "results of the run return {'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob}", "time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if edges is None else", "#{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z':", "up, and down elif len(beta.shape) == 
2: if idx[0] > 0: local_check.append((idx[0] -", "alpha) * s # Projection to constraint set arg = x_accel + u_dual", "#u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u =", "the dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u)", "= x_accel - z primal_residual_r = r_accel - s u_dual = u_dual +", "log-likelihood of the data given our new parameters cur_nll += _lambda * np.abs(u['r']).sum()", "tracker prev_nll = cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob:", "2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold #", "if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset of grid points for this", "(degrees of freedom) of a 1d or 2d grid of beta values in", "check local_check = [] # Generic graph case if edges is not None:", "- u) x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z Dx_relaxed", "# Update each coordinate one at a time. 
for coord in range(len(u)): prev_u", "(E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find beta using an", "verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of", "u using alternating direction method of multipliers with a cached LU decomposition.''' if", "{4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof':", "BIC = -2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i] = -2 *", "< max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the likelihood", "20) # u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver !=", "= np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z Dx_relaxed = alpha *", "+ t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a * (z_new", "via Graph Fused Lasso') # if initial_values is None: # beta = np.zeros(y.shape,", "the first-difference operator. K is the matrix (W + a D^T D) #", "= _lambda # step-size parameter # Set up system involving graph Laplacian L", "w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx if grid_map is", "2 dimensions unless edges are specified explicitly. ({0} given)'.format(len(beta.shape))) # Check the index's", "= np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters = []", "explicitly. 
({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors for local_idx in local_check:", "penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x =", "z_new primal_residual = Dx - z #primal_residual = Dx_hat - z # Update", "graph fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm", "(1. - c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression", "[] self.delta_iters = [] # ''' Load the graph fused lasso library '''", "# ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w,", "score for each lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom", "= D.dot(x) # Update z Dx_relaxed = alpha * Dx + (1 -", "= initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints,", "== 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps,", "np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def", "_lambda)) # Clear out all the info from the previous run self.reset() #", "= (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1]", "/ 2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg, local_lambda / a) r_accel", "cur_step = 0 while delta > converge and cur_step < max_steps: # Update", "list of plateaus and their values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist):", "we changed the step-size # TODO: is this worth it? 
We're paying a", "self.reset() # Fit to the final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge,", "self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver == 'sls':", "and update the previous objective value cur_step += 1 prev_objective = cur_objective return", "via coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1,", "= null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x", "# Solve the dual via alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y,", "1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(),", "= [] w_trace = [] c_trace = [] results_trace = [] best_idx =", "The BIC score for each lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees", "import norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as", "print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out all the info from the", "} self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge = converge", "r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace,", "n = len(y) m = n - 1 a = _lambda # The", "# The AIC score for each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The", "u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver", "- rel_tol max_member = val + rel_tol # Check every possible boundary of", "fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm #", 
"self.delta_iters = [] # ''' Load the graph fused lasso library ''' #", "np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters", "#primal_residual = Dx_hat - z # Update u u = (u + a", "if initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u =", "1, u0=u) elif dual_solver == 'lbfgs': # Solve the dual via L-BFGS-B u", "max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max()", "warm start if initial_values is None: # Graph Laplacian L = csc_matrix(D.T.dot(D) +", "0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r':", "# beta = np.clip(beta, -20, 20) # u = None else: raise Exception('Unknown", "if dual_solver != 'admm' and dual_solver != 'graph': # Back out beta from", "AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC):", "+ L) # Update the step counter cur_step += 1 if verbose and", "> 1, u0=u) elif dual_solver == 'admm': # Solve the dual via alternating", "l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u - A.T[coord]", "[i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and", "/ a)) W_over_a = np.diag(weights / a) #x_denominator = sparse.linalg.inv(W_over_a + L) #", "# scaled dual variable for constraint x = z t_dual = np.zeros(m) #", "initial_values is None: x = np.array([y.mean()] * n) z = np.zeros(m) u =", "post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u)", "u + a * primal_residual # Check 
convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean())", "multipliers.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers') n =", "enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset of grid points for", "changed the step-size # TODO: is this worth it? We're paying a matrix", "return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the betas", "log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx,", "path of the generalized lasso to find the best lambda value.''' lambda_grid =", "for i in dual_residual_u] + [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged", "= sparse.linalg.inv(W_over_a + L) # Initialize primal and dual variables if initial_values is", "+ [2*a] * (n-2) + [a]) + W # diagonal entries Kl =", "# Create a grid structure out of the vector of betas if grid_map", "of betas if grid_map is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map !=", "= lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a * (z_new - z) dual_residual_t", "verbose: print('\\t\\tSolving u via Sequential Least Squares') if u0 is None: u0 =", "(n-1)) # above the diagonal cur_step += 1 if verbose and cur_step %", "= r_accel - s u_dual = u_dual + primal_residual_x t_dual = t_dual +", "{3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0} [DoF: {1},", "== 'lbfgs': # Solve the dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda,", "Solve the dual via sequential least squares u = self._u_slsqp(x, A, _lambda, verbose", "signal_dist, null_dist): '''Perform unpenalized 1-d regression on all of the points in a", "#W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a) 
#x_denominator = sparse.linalg.inv(W_over_a", "direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose >", "elif dual_solver == 'lbfgs': # Solve the dual via L-BFGS-B u = self._u_lbfgsb(x,", "0.5 # Recalculate the x_denominator since we changed the step-size # TODO: is", "best_idx is None or bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus = plateaus", "arg = x_accel + u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new", "None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta':", "None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r = x - A.dot(u)", "beta): '''Calculate the log-likelihood of the betas given the weights and data.''' return", "not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid):", "results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000})", "parameter based on norm of primal and dual residuals a *= 2 if", "* z # over-relax Dx z_new = _soft_threshold(Dx_relaxed + u / a, _lambda", "diagonal Ku = np.array([-a] * (n-1)) # above the diagonal # Initialize primal", "cur_step < max_steps: # Update each coordinate one at a time. for coord", "ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta, c, delta):", "about the current iterate and coordinate descent to optimize Beta. 
''' prev_nll =", "the convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge) if verbose", "Alternating Direction Method of Multipliers (1-D fused lasso)') n = len(y) m =", "= D.dot(z_new) dual_residual_u = a * (z_new - z) dual_residual_t = a *", "u u = (u + a * primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm", "range(len(u)): prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] =", "plateau and calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val", "> converge and cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...')", "penalties.T else: weights = (prior_prob * (1 - prior_prob)) y = beta -", "warm-starting the next iteration initial_values = results # Save the trace of all", "np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z Dx_relaxed = alpha * Dx", "is None: x = np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m)", "2 if primal_resnorm > 10 * dual_resnorm else 0.5 Kd = np.array([a] +", "delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters)", "x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x): return 1. 
/ (1.", "A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver == 'sls': #", "'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps,", "Form the parameters for our weighted least squares if dual_solver != 'admm' and", "self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def", "u0=None): '''Solve for u using sequential least squares.''' if verbose: print('\\t\\tSolving u via", "-np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where D", "<= max_member: # Label this index as being checked so it's not re-checked", "decomposition.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers') n =", "inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights)", "= [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), #", "verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom') #", "Dx z_new = _soft_threshold(Dx_relaxed + u / a, _lambda / a) dual_residual =", "a vector for efficiency weights = 0.5 * exp_beta / (1 + exp_beta)**2", "def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th value of u.''' return", "a) x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize", "parameter based on norm of primal and dual residuals # This is the", "= _lambda # The D matrix is the first-difference operator. 
K is the", "'lbfgs': # Solve the dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose", "c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters =", "= [] c_trace = [] results_trace = [] best_idx = None best_plateaus =", "L) # Update the step counter cur_step += 1 if verbose and cur_step", "beta = initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll = 0 cur_step", "of freedom calculation does not currently support more than 2 dimensions unless edges", "fused lasso case.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers", "prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data)", "local_check.append((idx[0] + 1, idx[1])) # right if idx[1] > 0: local_check.append((idx[0], idx[1] -", "dual variable for constraint r = s else: lu_factor = initial_values['lu_factor'] x =", "import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as sla from functools import", "if verbose > 1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until", "LU decomposition.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers') n", "beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters", "= s - t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if adaptive", "= np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0}", "adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights,", "cur_step < max_steps: # Update x out = 
_1d_fused_lasso_crossprod(a*z - u) x =", "Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm)", "sufficient statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) *", "and their values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood", "u_dual = np.zeros(n) # scaled dual variable for constraint x = z t_dual", "the indices without reaching an unchecked one. if check_map[idx]: break # Create the", "u using coordinate descent.''' if verbose: print('\\t\\tSolving u via Coordinate Descent') u =", "best_idx = i best_plateaus = plateaus # Save the final run parameters to", "np.zeros(n) # scaled dual variable for constraint x = z t_dual = np.zeros(m)", "beta[i+1] return np.array(x) def ilogit(x): return 1. / (1. + np.exp(-x)) def calc_plateaus(beta,", "AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for u", "from scipy import sparse from scipy.stats import norm from scipy.optimize import minimize, minimize_scalar", "'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda,", "dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom') # Create", "alpha) * z # over-relaxation # Update constraint term r arg = s", "None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid): if", "for the data with the given penalty matrix.''' delta = converge + 1", "= np.zeros(lambda_grid.shape) # The degrees of freedom of each final solution log_likelihood_trace =", "return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where D 
is", "every possible boundary of the plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors", "verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the data", "aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace),", "- ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2", "the step self.add_step(post_prob, beta, prior_prob, delta) # Increment the step counter cur_step +=", "# step-size parameter # Initialize primal and dual variables from warm start if", "current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence delta = np.abs(prev_nll", "sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood term z = np.zeros(n)", "1. / (1. + np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus", "return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return", "a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def", "each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda", "scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as sla from functools", "L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B') if u0 is None: u0 =", "and cur_step < max_steps: # Update x x = (weights * y +", "'''Calculate the log-likelihood of the betas given the weights and data.''' return (np.log(1", "next unchecked point on the grid idx = to_check.popleft() # If we already", "finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) 
self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters)", "= len(y) m = D.shape[0] a = inflate * _lambda # step-size parameter", "_u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th value of u.''' return np.array([_lambda", "None or bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus = plateaus # Save", "constraint x = z t_dual = np.zeros(m) # scaled dual variable for constraint", "u0 is None: u0 = np.zeros(A.shape[1]) # Create our box constraints bounds =", "to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta index has been", "[] dual_trace = [] converged = False cur_step = 0 while not converged", "= np.array([-a] * (n-1)) # below the diagonal Ku = np.array([-a] * (n-1))", "max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the", "mean self.stdev = stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self):", "= cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(),", "right # 2d case -- check left, right, up, and down elif len(beta.shape)", "'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data,", "self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight)", "jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the", "2d grid of beta values in linear time.''' to_check = deque(itertools.product(*[range(x) for x", "# The D matrix is the first-difference operator. 
K is the matrix (W", "each of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose:", "def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x", "[] c_trace = [] results_trace = [] best_idx = None best_plateaus = None", "= np.diag(weights / a) x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a +", "# This is the varying penalty extension to standard ADMM a *= 2", "now else: raise Exception('Degrees of freedom calculation does not currently support more than", "residuals a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 Kd", "the plateaus (degrees of freedom) of a 1d or 2d grid of beta", "i'th value of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac =", "cross-product D^T x, where D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1])", "= initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace =", "cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities", "dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z': z,", "above the diagonal # Initialize primal and dual variables if initial_values is None:", "converge) if verbose: print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob, delta)", "thus far if best_idx is None or bic_trace[i] < bic_trace[best_idx]: best_idx = i", "flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double,", "Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = 
np.array([y.mean()] * n)", "initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r = initial_values['r'] s = initial_values['s']", "verbose > 1, u0=u) elif dual_solver == 'sls': # Solve the dual via", "for the i'th value of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u):", "if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm:", "import numpy as np from scipy import sparse from scipy.stats import norm from", "_lambda r = _soft_threshold(arg, local_lambda / a) r_accel = alpha * r +", "* (s_new - s) z = z_new s = s_new # Dual update", "(n - k - 1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] *", "one at a time. for coord in range(len(u)): prev_u = u[coord] next_u =", "size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda value dof_trace", "given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data)", "betas given the weights and data.''' return (np.log(1 + np.exp(beta)) - post_prob *", "w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters)", "return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function for a", "# u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm'", "until every beta index has been checked while to_check: if verbose > 1:", "z primal_residual_r = r_accel - s u_dual = u_dual + primal_residual_x t_dual =", "return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda,", "def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of the data given the", "converged and cur_step < max_steps: # Update x x_numerator = 1.0 / a", 
"_soft_threshold(Dx_hat + u / a, _lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new", "L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box constraints", "method only works for the 1-D fused lasso case.''' if verbose: print('\\t\\tSolving u", "our box constraints bounds = [(-_lambda, _lambda) for _ in u0] # Fit", "# Label this index as being checked so it's not re-checked unnecessarily check_map[local_idx]", "* np.abs(u['r']).sum() # Track the change in log-likelihood to see if we've converged", "= minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self,", "W_over_a = np.diag(weights / a) x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a", "a D^T D) # where W is the diagonal matrix of weights. We", "as a vector for efficiency weights = 0.5 * exp_beta / (1 +", "* (1 - prior_prob)) y = beta - (prior_prob - post_prob) / weights", "= 2k - 2ln(L) aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i]", "= y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps,", "# beta = initial_values['beta'] # z = initial_values['z'] # u = initial_values['u'] #", "np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom) of", "if verbose: print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob, delta) #", "one. 
if check_map[idx]: break # Create the plateau and calculate the inclusion conditions", "scipy import sparse from scipy.stats import norm from scipy.optimize import minimize, minimize_scalar from", "+ u / a, _lambda / a) dual_residual = a * D.T.dot(z_new -", "def __init__(self, mean, stdev): self.mean = mean self.stdev = stdev def pdf(self, data):", "below the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal cur_step", "# step-size parameter # Set up system involving graph Laplacian L = D.T.dot(D)", "= np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for _", "'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def", "results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def", "the inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val = beta[idx] min_member", "this index as being checked so it's not re-checked unnecessarily check_map[local_idx] = True", "i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear", "lasso library that has an optimized ADMM routine.''' if verbose: print('\\t\\tSolving via Graph", "t_dual = np.zeros(m) # scaled dual variable for constraint r = s else:", "!= 'graph': # weights is a diagonal matrix, represented as a vector for", "inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y,", "multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u", "dual_solver != 'admm' and dual_solver != 'graph': # weights is a diagonal matrix,", "None: x = np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m) else:", "if adaptive else _lambda r = _soft_threshold(arg, local_lambda / a) r_accel = alpha", "a super fast graph fused lasso 
library that has an optimized ADMM routine.'''", "initial_values['z'] r = initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual']", "initial_values=None): '''Solve for u using alternating direction method of multipliers.''' if verbose: print('\\t\\tSolving", "using sequential least squares.''' if verbose: print('\\t\\tSolving u via Sequential Least Squares') if", "None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data = np.array([data[x,y] for", "- z primal_residual_r = r_accel - s u_dual = u_dual + primal_residual_x t_dual", "D.T.dot(a * z - u) x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) #", "r = initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace", "without reaching an unchecked one. if check_map[idx]: break # Create the plateau and", "of free parameters in the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i]", "least squares.''' if verbose: print('\\t\\tSolving u via Sequential Least Squares') if u0 is", "final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace =", "{1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by", "coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u)", "our box constraints bounds = [(-_lambda, _lambda) for u0_i in u0] results =", "u0=u) elif dual_solver == 'lbfgs': # Solve the dual via L-BFGS-B u =", "(weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x,", "-2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track", "is not None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r = x", "check left and right elif len(beta.shape) == 1: if idx[0] 
> 0: local_check.append(idx[0]", "!= 'admm' and dual_solver != 'graph': # Back out beta from the dual", "A simple Gaussian distribution with known mean and stdev. ''' def __init__(self, mean,", "prev_nll = cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f},", "is not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda)", "u using sequential least squares.''' if verbose: print('\\t\\tSolving u via Sequential Least Squares')", "dual_residual_u = a * (z_new - z) dual_residual_t = a * (s_new -", "for idx in p: weights[idx if grid_map is None else grid_map[idx[0], idx[1]]] =", "*= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 # Recalculate the", "self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int,", "z # over-relax Dx z_new = _soft_threshold(Dx_relaxed + u / a, _lambda /", "Get the subset of grid points for this plateau if grid_map is not", "and beta[local_idx] <= max_member: # Label this index as being checked so it's", "= t_dual + primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for i in", "alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1]", "for u using alternating direction method of multipliers. 
Note that this method only", "break # Edge case -- If we went through all the indices without", "c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self,", "-2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] +", "parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track the change in log-likelihood to", "-20, 20) # u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver", "dual_resnorm else 0.5 Kd = np.array([a] + [2*a] * (n-2) + [a]) +", "verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return", "method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform", "if verbose: print('\\tE-step...') # Get the likelihood weights vector (E-step) post_prob = self._e_step(data,", "print(y) if dual_solver == 'cd': # Solve the dual via coordinate descent u", "# Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition", "if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max()))", "< beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1])) # right if idx[1] >", "if edges is not None: local_check.extend(edges[idx]) # 1d case -- check left and", "= self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get the negative log-likelihood of", "initial_values=u) beta = u['beta'] # if np.abs(beta).max() > 20: # beta = 
np.clip(beta,", "and cur_step < max_steps: # Update each coordinate one at a time. for", "is the first-difference operator. K is the matrix (W + a D^T D)", "prev_u - A.T[coord] * u[coord] # Track the change in the objective function", "convex optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps,", "(np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source:", "and 2d cases for now else: raise Exception('Degrees of freedom calculation does not", "1: local_check.append(idx[0] + 1) # right # 2d case -- check left, right,", "next iteration initial_values = results # Save the trace of all the resulting", "null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight) return", "the info from the previous run self.reset() # Fit to the final values", "_u_objective_func(u, x, A) cur_step = 0 while delta > converge and cur_step <", "plateaus and their values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative", "* z # over-relaxation # Update constraint term r arg = s -", "+ D.T.dot(a * z - u) x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x)", "np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints,", "def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver,", "Increment the step counter cur_step += 1 # Update the negative log-likelihood tracker", "u0 cur_step = 0 while delta > converge and cur_step < max_steps: if", "- z) dual_residual_t = a * (s_new - s) z = z_new s", "local_check = [] # Generic graph case if edges is not None: local_check.extend(edges[idx])", "loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def", 
"freedom) of a 1d or 2d grid of beta values in linear time.'''", "print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob, delta) # Increment the", "= signal_weight / (signal_weight + null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob,", "via alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps,", "= set([idx]) cur_unchecked = deque([idx]) val = beta[idx] min_member = val - rel_tol", "{2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0}", "Fit to the final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge,", "0 while not converged and cur_step < max_steps: # Update x out =", "minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as sla from functools import partial", "the dual solution beta = y - (1. / weights) * penalties.T.dot(u) #", "prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord]", "def ilogit(x): return 1. / (1. 
+ np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0):", "# up # Only supports 1d and 2d cases for now else: raise", "pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self,", "a matrix inverse in exchange for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights", "def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 +", "is the varying penalty extension to standard ADMM a *= 2 if primal_resnorm", "x,y in p]) else: plateau_data = np.array([data[x,y] for x,y in p]) w =", "alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose", "cur_objective, delta)) # Increment the step counter and update the previous objective value", "points for this plateau if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for", "dual...') x = np.sqrt(weights) * y A = (1. / np.sqrt(weights))[:,np.newaxis] * penalties.T", "primal_residual_x = x_accel - z primal_residual_r = r_accel - s u_dual = u_dual", "{2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. 
AIC = np.sum((y", "and the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's", "+ converge) if verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step", "'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative", "Label this index as being checked so it's not re-checked unnecessarily check_map[local_idx] =", "np from scipy import sparse from scipy.stats import norm from scipy.optimize import minimize,", "** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold", "= n - 1 a = _lambda # The D matrix is the", "AIC score for each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score", "else: weights = (prior_prob * (1 - prior_prob)) y = beta - (prior_prob", "_u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u)", "def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom) of a", "max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using alternating direction method", "lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve", "bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda value dof_trace =", "= converge + 1 if initial_values is None: beta = np.zeros(data.shape) prior_prob =", "best_plateaus = plateaus # Save the final run parameters to use for warm-starting", "the i'th value of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac", "converge) if verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step counter", "Save the trace of all the resulting parameters 
beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if", "# Cache the exponentiated beta exp_beta = np.exp(beta) # Form the parameters for", "*= inflate u_dual /= inflate t_dual /= inflate elif dual_resnorm > 5 *", "verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the likelihood weights vector (E-step)", "= np.array([-a] * (n-1)) # above the diagonal # Initialize primal and dual", "return np.array(x) def ilogit(x): return 1. / (1. + np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4,", "return np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm)", "noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean,", "= len(y) m = n - 1 a = _lambda # The D", "= beta - (prior_prob - post_prob) / weights print(weights) print(y) if dual_solver ==", "- k - 1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1)", "= alpha * x + (1 - alpha) * z # over-relaxation #", "initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph': u =", "best model thus far if best_idx is None or bic_trace[i] < bic_trace[best_idx]: best_idx", "u / a, _lambda / a) dual_residual = a * D.T.dot(z_new - z)", "- 1, idx[1])) # left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] +", "1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1,", "value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting", "step self.add_step(post_prob, beta, prior_prob, delta) # Increment the step counter cur_step += 1", "term s = np.zeros(m) # slack variable for penalty u_dual = np.zeros(n) #", "of freedom) of a 1d or 2d grid of beta values in linear", "if verbose: print('\\t\\tSolving u via 
Alternating Direction Method of Multipliers') n = len(y)", "from the dual solution beta = y - (1. / weights) * penalties.T.dot(u)", "flat_data = data.flatten() edges = penalties[3] if dual_solver == 'graph' else None if", "step-size parameter # Initialize primal and dual variables from warm start if initial_values", "u = initial_values['u'] prev_nll = 0 cur_step = 0 while delta > converge", "np.array([-a] * (n-1)) # above the diagonal cur_step += 1 if verbose and", "aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting for", "(a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta", "the run return {'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self,", "the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal # Initialize", "= graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double,", "'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of the data", "= [] # Generic graph case if edges is not None: local_check.extend(edges[idx]) #", "/ weights print(weights) print(y) if dual_solver == 'cd': # Solve the dual via", "Sequential Least Squares') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our", "2 * dof return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC':", "on norm of primal and dual residuals # This is the varying penalty", "A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u):", "Check the index's unchecked neighbors for local_idx in local_check: if not check_map[local_idx] \\", "== 'graph' else None if grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:]", "'u': u } self.solver.alpha = alpha 
self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge", "u_dual /= inflate t_dual /= inflate elif dual_resnorm > 5 * primal_resnorm: a", "not converged and cur_step < max_steps: # Update x x_numerator = 1.0 /", "max_steps: # Update each coordinate one at a time. for coord in range(len(u)):", "options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev))", "in the objective function value cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective", "left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1) # right #", "of the run return {'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob} def", "1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1,", "1-d regression for each of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in", "self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters =", "#{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) +", "sparse.linalg.inv(W_over_a + L) # Initialize primal and dual variables if initial_values is None:", "dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid,", "inflate u_dual /= inflate t_dual /= inflate elif dual_resnorm > 5 * primal_resnorm:", "library that has an optimized ADMM routine.''' if verbose: print('\\t\\tSolving via Graph Fused", "time. 
for coord in range(len(u)): prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r)", "import itertools import numpy as np from scipy import sparse from scipy.stats import", "- np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac", "and primal_resnorm < converge_threshold # Update step-size parameter based on norm of primal", "of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx]", "Create a grid structure out of the vector of betas if grid_map is", "calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom) of a 1d", "flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w)", "A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x)", "* primal_resnorm: a /= inflate u_dual *= inflate t_dual *= inflate # Update", "/ a) r_accel = alpha * r + (1 - alpha) * s", "data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0,", "= True plateaus = [] if verbose: print('\\tCalculating plateaus...') if verbose > 1:", "all the info from the previous run self.reset() # Fit to the final", "initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace = [] converged = False", "= cur_objective return u def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for", "freedom of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace =", "the subset of grid points for this plateau if grid_map is not None:", "- z #primal_residual = Dx_hat - z # Update u u = (u", "np.zeros(m) # slack variable for penalty u_dual = np.zeros(n) # scaled dual variable", "primal_resnorm < 
converge_threshold if primal_resnorm > 5 * dual_resnorm: a *= inflate u_dual", "sparse from scipy.stats import norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse import", "/ (n - k - 1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i]", "prior_prob): '''Calculate the negative log-likelihood of the data given the weights.''' signal_weight =", "optimize Beta. ''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge + 1 u", "their values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function", "# Solve the dual via sequential least squares u = self._u_slsqp(x, A, _lambda,", "descent to optimize Beta. ''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge +", "for this plateau if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y", "else _lambda r = _soft_threshold(arg, local_lambda / a) r_accel = alpha * r", "/= inflate u_dual *= inflate t_dual *= inflate # Update the step counter", "edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus", "the change in log-likelihood to see if we've converged delta = np.abs(cur_nll -", "lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood term z", "self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha)", "x, A, _lambda, verbose, u0=None): '''Solve for u using sequential least squares.''' if", "+ 1) # right # 2d case -- check left, right, up, and", "* penalties.T else: weights = (prior_prob * (1 - prior_prob)) y = beta", "# weights is a diagonal matrix, represented as a vector for efficiency weights", "the signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver ==", "inflate u_dual *= inflate t_dual 
*= inflate # Update the step counter cur_step", "'''Perform unpenalized 1-d regression for each of the plateaus.''' weights = np.zeros(data.shape) for", "from scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as sla from", "np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus}", "initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u)", "u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace = [] converged", "scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self,", "= self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha,", "dual via coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose >", "len(beta.shape) == 2: if idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) # left", "= np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double')", "y = beta - (prior_prob - post_prob) / weights print(weights) print(y) if dual_solver", "{'x': x, 'r': r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace':", "= self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A):", "local_check.append((idx[0] - 1, idx[1])) # left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0]", "+ dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the best model", "delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge) if verbose: 
print('\\tDelta: {0}'.format(delta))", "exp_beta) if verbose > 1: print('\\t\\tForming dual...') x = np.sqrt(weights) * y A", "k * (ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] *", "alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda,", "'s': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor':", "_u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using", "-np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0) ##", "the negative log-likelihood of the data given our new parameters cur_nll += _lambda", "methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1,", "cur_nll) / (prev_nll + converge) if verbose > 1: print('\\t\\tM-step delta: {0}'.format(delta)) #", "delta = converge + 1 u = u0 cur_step = 0 while delta", "= [] self.c_iters = [] self.delta_iters = [] # ''' Load the graph", "BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace,", "self._m_log_likelihood(post_prob, beta) delta = converge + 1 u = u0 cur_step = 0", "alpha, inflate, initial_values=None): '''Solve for u using a super fast graph fused lasso", "the current iterate and coordinate descent to optimize Beta. 
''' prev_nll = self._m_log_likelihood(post_prob,", "a 1d or 2d grid of beta values in linear time.''' to_check =", "diagonal entries Kl = np.array([-a] * (n-1)) # below the diagonal Ku =", "= -2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i]", "cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver == 'sls': # Solve the", "Use the naive DoF if verbose: print('Calculating AIC') # Get the negative log-likelihood", "delta: {0}'.format(delta)) # Increment the step counter cur_step += 1 # Update the", "return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th", "1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated beta exp_beta =", "= signal_dist self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot else:", "the number of free parameters in the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance,", "if penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters =", "Update z Dx_relaxed = alpha * Dx + (1 - alpha) * z", "via Alternating Direction Method of Multipliers (1-D fused lasso)') n = len(y) m", "coordinate descent to optimize Beta. 
''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge", "# The AICc score for each lambda value (correcting for finite sample size)", "approximation and convex optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda,", "standard ADMM a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5", "_1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual = Dx - z #primal_residual =", "Only supports 1d and 2d cases for now else: raise Exception('Degrees of freedom", "exp_beta + beta - (1 + exp_beta) if verbose > 1: print('\\t\\tForming dual...')", "s = np.zeros(m) # slack variable for penalty u_dual = np.zeros(n) # scaled", "primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged", "Dx = D.dot(x) # Update z Dx_relaxed = alpha * Dx + (1", "== 1: if idx[0] > 0: local_check.append(idx[0] - 1) # left if idx[0]", "using alternating direction method of multipliers with a cached LU decomposition.''' if verbose:", "for constraint x = z t_dual = np.zeros(m) # scaled dual variable for", "u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver,", "signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x is None:", "> 10 * dual_resnorm else 0.5 Kd = np.array([a] + [2*a] * (n-2)", "= s_new # Dual update primal_residual_x = x_accel - z primal_residual_r = r_accel", "weights * y + D.T.dot(a * z - u) x = np.linalg.solve(x_denominator, x_numerator)", "while delta > converge and cur_step < max_steps: # Update each coordinate one", "'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c':", "class GaussianKnown: ''' A simple Gaussian distribution with 
known mean and stdev. '''", "grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid): if verbose:", "converged delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge) if verbose: print('\\tDelta:", "left and right elif len(beta.shape) == 1: if idx[0] > 0: local_check.append(idx[0] -", "for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x):", "primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z': z, 's': s,", "sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda value", "_lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using", "0 while delta > converge and cur_step < max_steps: if verbose > 1:", "unchecked neighbors for local_idx in local_check: if not check_map[local_idx] \\ and beta[local_idx] >=", "(prev_objective + converge) if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}:", "[{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum()))", "self.beta_iters = [] self.c_iters = [] self.delta_iters = [] # ''' Load the", "print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta index has", "0: local_check.append(idx[0] - 1) # left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0]", "(1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight) return post_prob def", "Get the next unchecked point on the grid idx = to_check.popleft() # If", "self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] def", "signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight +", "u_trace.append(results['u']) 
w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i],", "= u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1],", "+ null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).'''", "np.ediff1d(x) # Update z Dx_hat = alpha * Dx + (1 - alpha)", "dtype='double') # else: # beta = initial_values['beta'] # z = initial_values['z'] # u", "* n) # likelihood term z = np.zeros(n) # slack variable for likelihood", "dual_solver != 'graph': # Back out beta from the dual solution beta =", "# c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double, #", "as sla from functools import partial from collections import deque from pygfl.solver import", "idx = to_check.popleft() # If we already have checked this one, just pop", "np.zeros(lambda_grid.shape) # The BIC score for each lambda value dof_trace = np.zeros(lambda_grid.shape) #", "* (z - u_dual)) / (weights + a) x_accel = alpha * x", "cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence delta = np.abs(prev_nll - cur_nll)", "cur_step += 1 # Update the negative log-likelihood tracker prev_nll = cur_nll return", "function for the i'th value of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx,", "u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self,", "local_check.append((idx[0], idx[1] - 1)) # down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0],", "= np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta)) u = initial_values else:", "trails, breakpoints, weights=weights) if initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z =", "'dual_trace': dual_trace, 
'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold,", "- z) z = z_new primal_residual = Dx - z #primal_residual = Dx_hat", "delta > converge and cur_step < max_steps: # Update each coordinate one at", "using alternating direction method of multipliers. Note that this method only works for", "= beta[idx] min_member = val - rel_tol max_member = val + rel_tol #", "0.5 * exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob /", "# Update u u = u + a * primal_residual # Check convergence", "/= inflate elif dual_resnorm > 5 * primal_resnorm: a /= inflate u_dual *=", "max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False,", "a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 Kd =", "A.T[coord] * u[coord] # Track the change in the objective function value cur_objective", "data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2.,", "from the previous run self.reset() # Fit to the final values results =", "return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for", "# down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) #", "r = x - A.dot(u) delta = converge + 1 prev_objective = _u_objective_func(u,", "SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if", "dual via sequential least squares u = self._u_slsqp(x, A, _lambda, verbose > 1,", "self.beta_iters = [] self.c_iters = [] self.delta_iters = [] def solution_path(self, data, penalties,", "is the matrix (W + a D^T D) # where W is the", "Update each 
coordinate one at a time. for coord in range(len(u)): prev_u =", "We're paying a matrix inverse in exchange for varying the step size #W_over_a", "step-size parameter based on norm of primal and dual residuals a *= 2", "above the diagonal cur_step += 1 if verbose and cur_step % 100 ==", "converge, beta, z, u) # return {'beta': beta, 'z': z, 'u': u }", "initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = []", "Check every possible boundary of the plateau while cur_unchecked: idx = cur_unchecked.popleft() #", "1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph': u", "the likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...') #", "[] converged = False cur_step = 0 D_full = D while not converged", "see if we've converged delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge)", "a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 # Recalculate", "is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points =", "extension to standard ADMM a *= 2 if primal_resnorm > 10 * dual_resnorm", "c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver", "parameter # Initialize primal and dual variables from warm start if initial_values is", "1, idx[1])) # right if idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) #", "= np.zeros(n) # slack variable for likelihood r = np.zeros(m) # penalty term", "y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for", "_lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas", "from pygfl.solver import TrailSolver class 
GaussianKnown: ''' A simple Gaussian distribution with known", "trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z, u) # return {'beta':", "n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i]))", "signal_dist, null_dist): '''The negative log-likelihood function for a plateau.''' return -np.log(c * signal_dist.pdf(data)", "aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC:", "in the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i]", "weights. We use a tridiagonal representation # of K. Kd = np.array([a] +", "# Calculate AIC = 2k - 2ln(L) aic_trace[i] = 2. * dof_trace[i] -", "Get the signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver", "# Track the step self.add_step(post_prob, beta, prior_prob, delta) # Increment the step counter", "likelihood term z = np.zeros(n) # slack variable for likelihood r = np.zeros(m)", "D matrix is the first-difference operator. 
K is the matrix (W + a", "final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps,", "Dx + (1 - alpha) * z # over-relax Dx z_new = _soft_threshold(Dx_relaxed", "dof return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def", "betas if grid_map is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]]", "weights=weights) if initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u", "= initial_values['u'] primal_trace = [] dual_trace = [] converged = False cur_step =", "penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters = []", "is None or bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus = plateaus #", "x_numerator) Dx = D.dot(x) # Update z Dx_relaxed = alpha * Dx +", "# where W is the diagonal matrix of weights. 
We use a tridiagonal", "+ a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x)", "Count the number of free parameters in the grid (dof) plateaus = calc_plateaus(grid_points,", "W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal and dual", "u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data,", "primal and dual variables from warm start if initial_values is None: # Graph", "signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm':", "in beta.shape])) if edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)]", "dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif", "= AIC + 2k * (k+1) / (n - k - 1) aicc_trace[i]", "minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus,", "null_dist): '''Perform unpenalized 1-d regression on all of the points in a plateau.'''", "exp_beta = np.exp(beta) # Form the parameters for our weighted least squares if", "already have checked this one, just pop it off while to_check and check_map[idx]:", "x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal", "= initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace = [] converged =", "self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None,", "c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i],", "function for a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. 
- c) *", "D.dot(x) # Update z Dx_relaxed = alpha * Dx + (1 - alpha)", "initial_values is None: # Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache", "# Find beta using an alternating Taylor approximation and convex optimization (M-step) beta,", "1 if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f}", "self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters =", "x x_numerator = 1.0 / a * weights * y + D.T.dot(a *", "# of K. Kd = np.array([a] + [2*a] * (n-2) + [a]) +", "direction method of multipliers.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method of", "Dx_hat = alpha * Dx + (1 - alpha) * z # Over-relaxation", "'cd': # Solve the dual via coordinate descent u = self._u_coord_descent(x, A, _lambda,", "+ W # diagonal entries Kl = np.array([-a] * (n-1)) # below the", "_lambda, alpha, inflate, max_steps, converge, beta, z, u) # return {'beta': beta, 'z':", "dual residuals # This is the varying penalty extension to standard ADMM a", "primal_trace = [] dual_trace = [] converged = False cur_step = 0 while", "# else: # beta = initial_values['beta'] # z = initial_values['z'] # u =", "the next iteration initial_values = results # Save the trace of all the", "t_dual = t_dual + primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for i", "for our weighted least squares if dual_solver != 'admm' and dual_solver != 'graph':", "# n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha,", "linear time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if edges is None", "= self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find beta using an alternating Taylor", "= cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # 
self.graphfl_weight.argtypes =", "step counter cur_step += 1 if verbose and cur_step % 100 == 0:", "Create our box constraints bounds = [(-_lambda, _lambda) for u0_i in u0] results", "* prev_u - A.T[coord] * u[coord] # Track the change in the objective", "1)) # down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1))", "dual_solver == 'sls': # Solve the dual via sequential least squares u =", "cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if", "in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] + [i for i", "reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) def ilogit(x): return 1. /", "does not currently support more than 2 dimensions unless edges are specified explicitly.", "not re-checked unnecessarily check_map[local_idx] = True # Add it to the plateau and", "beta index has been checked while to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus)", "i in primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for", "matrix (W + a D^T D) # where W is the diagonal matrix", "dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') # else:", "None: # Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU", "self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters = []", "beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta)) u = initial_values", "through all the indices without reaching an unchecked one. 
if check_map[idx]: break #", "= (weights * y + a * (z - u_dual)) / (weights +", "(n-2) + [a]) + W # diagonal entries Kl = np.array([-a] * (n-1))", "alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of multipliers.''' if verbose:", "'t_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y,", "'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps,", "from warm start if initial_values is None: # Graph Laplacian L = csc_matrix(D.T.dot(D)", "if primal_resnorm > 5 * dual_resnorm: a *= inflate u_dual /= inflate t_dual", "t_dual *= inflate # Update the step counter cur_step += 1 if verbose", "stdev): self.mean = mean self.stdev = stdev def pdf(self, data): return norm.pdf(data, loc=self.mean,", "* (n-1)) # below the diagonal Ku = np.array([-a] * (n-1)) # above", "in range(len(u)): prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord]", "parameters for our weighted least squares if dual_solver != 'admm' and dual_solver !=", "results['beta'].reshape(data.shape) # Count the number of free parameters in the grid (dof) plateaus", "= 0 cur_step = 0 while delta > converge and cur_step < max_steps:", "value dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of each final solution", "1, u0=u) elif dual_solver == 'admm': # Solve the dual via alternating direction", "m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the", "'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist", "print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), 
results.x.max())) return results.x def _u_lbfgsb(self, x, A,", "for _ in u0] # Fit results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B',", "optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge,", "log-likelihood to see if we've converged delta = np.abs(cur_nll - prev_nll) / (prev_nll", "our weighted least squares if dual_solver != 'admm' and dual_solver != 'graph': #", "or 2d grid of beta values in linear time.''' to_check = deque(itertools.product(*[range(x) for", "# Form the parameters for our weighted least squares if dual_solver != 'admm'", "dual_resnorm > 5 * primal_resnorm: a /= inflate u_dual *= inflate t_dual *=", "> 1, u0=u) elif dual_solver == 'lbfgs': # Solve the dual via L-BFGS-B", "/ (1 + np.exp(beta)) u = initial_values else: beta = initial_values['beta'] prior_prob =", "2. * log_likelihood_trace[i] # Calculate AICc = AIC + 2k * (k+1) /", "{0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of the run return {'beta': beta,", "- A.dot(u) delta = converge + 1 prev_objective = _u_objective_func(u, x, A) cur_step", "in dual_residual_u] + [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm", "= [] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] def solution_path(self,", "lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda value aicc_trace", "using an alternating Taylor approximation and convex optimization (M-step) beta, u = self._m_step(beta,", "# Increment the step counter cur_step += 1 # Update the negative log-likelihood", "not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return", "100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, 
primal_resnorm)) return {'x':", "the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a)", "= converge + 1 prev_objective = _u_objective_func(u, x, A) cur_step = 0 while", "a * primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm", "the previous objective value cur_step += 1 prev_objective = cur_objective return u def", "class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist", "solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver != 'graph': # Back out", "if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1])) # right if", "# c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')]", "u0=None): '''Solve for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B') if", "= self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose -", "None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters =", "{1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z': z, 's':", "= (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight) return post_prob", "if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box constraints bounds", "and dual_solver != 'graph': # Back out beta from the dual solution beta", "== 'admm': # Solve the dual via alternating direction methods of multipliers #u", "# left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1])) #", "inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val = beta[idx] 
min_member =", "dual_trace = [] converged = False cur_step = 0 D_full = D while", "to find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace =", "score for each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for", "c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on all", "+ a * primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm", "!= 'admm' and dual_solver != 'graph': # weights is a diagonal matrix, represented", "term z = np.zeros(n) # slack variable for likelihood r = np.zeros(m) #", "the dual via alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda,", "z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step,", "a, _lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z =", "for each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each", "*= inflate t_dual *= inflate # Update the step counter cur_step += 1", "not None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r = x -", "(1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the", "= 1.0 / a * weights * y + D.T.dot(a * z -", "Update constraint term r arg = s - t_dual local_lambda = (_lambda -", "Solve the dual via alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights,", "idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1])) # right if idx[1]", "np.sum(Dx > converge_threshold) + 1. 
AIC = np.sum((y - x)**2) + 2 *", "for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm", "lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001,", "elif dual_solver == 'admm': # Solve the dual via alternating direction methods of", "each lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of each", "local_check.append(idx[0] - 1) # left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] +", "r = np.zeros(m) # penalty term s = np.zeros(m) # slack variable for", "= csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A')", "aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i]", "= (prior_prob * (1 - prior_prob)) y = beta - (prior_prob - post_prob)", "> 1, u0=u) elif dual_solver == 'sls': # Solve the dual via sequential", "l2_norm_A = (A * A).sum(axis=0) r = x - A.dot(u) delta = converge", "signal_dist.pdf(data) + (1. 
- c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized", "z # Update u u = (u + a * primal_residual).clip(-_lambda, _lambda) #", "# Add it to the plateau and the list of local unchecked locations", "log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k - 2ln(L) aic_trace[i]", "* dual_resnorm else 0.5 Kd = np.array([a] + [2*a] * (n-2) + [a])", "verbose, u0=None): '''Solve for u using sequential least squares.''' if verbose: print('\\t\\tSolving u", "based on norm of primal and dual residuals # This is the varying", "x out = _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd, W *", "1 u = u0 cur_step = 0 while delta > converge and cur_step", "case -- check left and right elif len(beta.shape) == 1: if idx[0] >", "- _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm", "np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data = np.array([data[x,y] for x,y in p])", "and dual residuals # This is the varying penalty extension to standard ADMM", "beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for", "to constraint set arg = x_accel + u_dual + D.T.dot(r_accel + t_dual) z_new", "for u using a super fast graph fused lasso library that has an", "of the data given our new parameters cur_nll += _lambda * np.abs(u['r']).sum() #", "max_steps: # Update x x = (weights * y + a * (z", "cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective) / (prev_objective +", "1)) # up # Only supports 1d and 2d cases for now else:", "print('\\t\\tSolving via Graph Fused Lasso') # if initial_values is None: # beta =", "admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max() > 20: # beta =", "diagonal Ku = np.array([-a] * 
(n-1)) # above the diagonal cur_step += 1", "= len(f) x = [0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] +", "m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization", "out of the vector of betas if grid_map is not None: grid_points[grid_map !=", "counter and update the previous objective value cur_step += 1 prev_objective = cur_objective", "= z_new primal_residual = Dx - z #primal_residual = Dx_hat - z #", "self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta,", "convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge) if verbose >", "if verbose: print('\\t\\tSolving via Graph Fused Lasso') # if initial_values is None: #", "the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence delta =", "return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0)", "of plateaus and their values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The", "[] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] def solution_path(self, data,", "z # Over-relaxation z_new = _soft_threshold(Dx_hat + u / a, _lambda / a)", "* _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual = Dx - z #primal_residual", "i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] + [i for", "bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta':", "m = D.shape[0] a = inflate * _lambda # step-size parameter # Initialize", "lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), 
np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC", "for x,y in p]) else: plateau_data = np.array([data[x,y] for x,y in p]) w", "fused lasso)') n = len(y) m = n - 1 a = _lambda", "edges is not None: local_check.extend(edges[idx]) # 1d case -- check left and right", "Add it to the plateau and the list of local unchecked locations cur_unchecked.append(local_idx)", "freedom') # Create a grid structure out of the vector of betas if", "plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data = np.array([data[x,y] for x,y", "- 1: local_check.append((idx[0] + 1, idx[1])) # right if idx[1] > 0: local_check.append((idx[0],", "[] converged = False cur_step = 0 while not converged and cur_step <", "print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) #", "Taylor approximation and convex optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties,", "+ 1 prev_objective = _u_objective_func(u, x, A) cur_step = 0 while delta >", "x, A) cur_step = 0 while delta > converge and cur_step < max_steps:", "- 1)) # down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] +", "via sequential least squares u = self._u_slsqp(x, A, _lambda, verbose > 1, u0=u)", "+ (1 - alpha) * z # over-relax Dx z_new = _soft_threshold(Dx_relaxed +", "aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood':", "while not converged and cur_step < max_steps: # Update x out = _1d_fused_lasso_crossprod(a*z", "direction method of multipliers with a cached LU decomposition.''' if verbose: print('\\t\\tSolving u", "# Returns the list of plateaus and their values return plateaus def plateau_loss_func(c,", "grid structure out of the vector of betas if grid_map is not None:", "'admm' and dual_solver != 'graph': # weights is a diagonal matrix, 
represented as", "return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps,", "- t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if adaptive else _lambda", "to use for warm-starting the next iteration initial_values = results # Save the", "1. AIC = np.sum((y - x)**2) + 2 * dof return {'x': x,", "= np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters = []", "negative log-likelihood tracker prev_nll = cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta):", "+ primal_residual_x t_dual = t_dual + primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i", "x - A.dot(u) delta = converge + 1 prev_objective = _u_objective_func(u, x, A)", "prev_nll = cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood", "+ c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)):", "just pop it off while to_check and check_map[idx]: try: idx = to_check.popleft() except:", "''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge + 1 u = u0", "step-size parameter based on norm of primal and dual residuals # This is", "{1}'.format(len(to_check), check_map.shape)) # Loop until every beta index has been checked while to_check:", "import csc_matrix, linalg as sla from functools import partial from collections import deque", "ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u", "# Update z Dx_hat = alpha * Dx + (1 - alpha) *", "Solve the dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose > 1,", "for each of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if", "+ (1 - alpha) * z # over-relaxation # Update constraint term r", "- z # Update u u = (u + a * primal_residual).clip(-_lambda, _lambda)", "itertools import 
numpy as np from scipy import sparse from scipy.stats import norm", "'''Calculate the plateaus (degrees of freedom) of a 1d or 2d grid of", "beta - (1 + exp_beta) if verbose > 1: print('\\t\\tForming dual...') x =", "print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next unchecked point on the grid", "self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters", "via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif dual_solver", "np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters", "DoF if verbose: print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data,", "r arg = s - t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0)", "the data given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob)", "verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max()))", "the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) #", "+ A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord] *", "Ku = np.array([-a] * (n-1)) # above the diagonal # Initialize primal and", "self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist =", "* D.T.dot(z_new - z) z = z_new primal_residual = Dx_relaxed - z #", "dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if primal_resnorm >", "delta = np.abs(prev_objective - cur_objective) / (prev_objective + converge) if verbose and cur_step", 
"(ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) -", "'''Perform unpenalized 1-d regression on all of the points in a plateau.''' return", "ntrails, trails, breakpoints, weights=weights) if initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z", "# u = np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta'] # z", "* z # Over-relaxation z_new = _soft_threshold(Dx_hat + u / a, _lambda /", "{1}'.format(i, _lambda)) # Clear out all the info from the previous run self.reset()", "np.abs(beta).max() > 20: # beta = np.clip(beta, -20, 20) # u = None", "weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y, weights, _lambda,", "parameters in the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus)", "D_full = D while not converged and cur_step < max_steps: # Update x", "the graph fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight =", "alpha * x + (1 - alpha) * z # over-relaxation # Update", "dual variables if initial_values is None: x = np.array([y.mean()] * n) z =", "1, idx[1])) # left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1,", "This is the varying penalty extension to standard ADMM a *= 2 if", "len(beta.shape) == 1: if idx[0] > 0: local_check.append(idx[0] - 1) # left if", "return 1. / (1. + np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the", "'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda,", "if idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) # left if idx[0] <", "from scipy.stats import norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix,", "x = np.sqrt(weights) * y A = (1. 
/ np.sqrt(weights))[:,np.newaxis] * penalties.T else:", "dual_residual_t = a * (s_new - s) z = z_new s = s_new", "-self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k - 2ln(L) aic_trace[i] = 2. *", "negative log-likelihood of the data given our new parameters cur_nll += _lambda *", "= initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll = 0 cur_step =", "plateau and the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each", "bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized", "'graph' else None if grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] =", "== 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x,", "we've converged delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge) if verbose:", "return (weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u,", "D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a *", "1 a = _lambda # The D matrix is the first-difference operator. 
K", "and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step,", "the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k -", "> 10 * dual_resnorm else 0.5 # Recalculate the x_denominator since we changed", "of the plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check local_check", "coordinate descent.''' if verbose: print('\\t\\tSolving u via Coordinate Descent') u = u0 if", "- 1: local_check.append((idx[0], idx[1] + 1)) # up # Only supports 1d and", "signal_dist self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x", "/ exp_beta + beta - (1 + exp_beta) if verbose > 1: print('\\t\\tForming", "local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val, cur_plateau)) #", "local_check.append(idx[0] + 1) # right # 2d case -- check left, right, up,", "-np.log(c * signal_dist.pdf(data) + (1. - c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist):", "t_dual = initial_values['t_dual'] primal_trace = [] dual_trace = [] converged = False cur_step", "x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A,", "= ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get the", "of the betas given the weights and data.''' return (np.log(1 + np.exp(beta)) -", "u = u0 cur_step = 0 while delta > converge and cur_step <", "deque([idx]) val = beta[idx] min_member = val - rel_tol max_member = val +", "= dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if primal_resnorm > 5 *", "Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter and", "plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. 
- c) * null_dist.pdf(data)).sum() def single_plateau_regression(data,", "1e-4).sum())) # Return the results of the run return {'beta': beta, 'u': u,", "cur_step < max_steps: # Update x x = (weights * y + a", "it off while to_check and check_map[idx]: try: idx = to_check.popleft() except: break #", "1: local_check.append((idx[0] + 1, idx[1])) # right if idx[1] > 0: local_check.append((idx[0], idx[1]", "return {'x': x, 'r': r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual,", "# Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns the list of plateaus", "else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r = initial_values['r']", "c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def", "prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def", "self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values is not", "val + rel_tol # Check every possible boundary of the plateau while cur_unchecked:", "* self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight +", "dof = np.sum(Dx > converge_threshold) + 1. 
AIC = np.sum((y - x)**2) +", "> 1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose >", "checked this one, just pop it off while to_check and check_map[idx]: try: idx", "Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k", "'''Solve for u using alternating direction method of multipliers.''' if verbose: print('\\t\\tSolving u", "np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting for finite sample", "= TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def", "min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2.,", "print(weights) print(y) if dual_solver == 'cd': # Solve the dual via coordinate descent", "Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i] = -2", "penalty matrix.''' delta = converge + 1 if initial_values is None: beta =", "s u_dual = u_dual + primal_residual_x t_dual = t_dual + primal_residual_r # Check", "x, A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for", "converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values is not None: self.solver.beta", "to_check.popleft() except: break # Edge case -- If we went through all the", "1, u0=u) elif dual_solver == 'sls': # Solve the dual via sequential least", "A) cur_step = 0 while delta > converge and cur_step < max_steps: #", "= cur_unchecked.popleft() # neighbors to check local_check = [] # Generic graph case", "check_map[idx]: try: idx = to_check.popleft() except: break # Edge case -- If we", "ADMM routine.''' if verbose: print('\\t\\tSolving via 
Graph Fused Lasso') # if initial_values is", "cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path", "convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm)", "u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood", "# Clear out all the info from the previous run self.reset() # Fit", "cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns the list", "this one, just pop it off while to_check and check_map[idx]: try: idx =", "run return {'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data,", "'z': z, 'u': u } self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps =", "this method only works for the 1-D fused lasso case.''' if verbose: print('\\t\\tSolving", "dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if verbose: print('\\tCalculating plateaus...') if verbose", "z) z = z_new primal_residual = Dx_relaxed - z # Update u u", "A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u", "find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape)", "> 5 * dual_resnorm: a *= inflate u_dual /= inflate t_dual /= inflate", "cur_objective) / (prev_objective + converge) if verbose and cur_step % 100 == 0:", "if verbose: print('\\t\\tSolving u via L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1])", "cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps,", 
"support more than 2 dimensions unless edges are specified explicitly. ({0} given)'.format(len(beta.shape))) #", "A = (1. / np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob * (1", "admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob = ilogit(beta) cur_nll", "log-likelihood of the betas given the weights and data.''' return (np.log(1 + np.exp(beta))", "Generic graph case if edges is not None: local_check.extend(edges[idx]) # 1d case --", "self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate,", "+ a * primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean())", "elif len(beta.shape) == 2: if idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) #", "import deque from pygfl.solver import TrailSolver class GaussianKnown: ''' A simple Gaussian distribution", "primal_resnorm > 10 * dual_resnorm else 0.5 # Recalculate the x_denominator since we", "n) z = np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z =", "generalized lasso to find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins))", "unpenalized 1-d regression on all of the points in a plateau.''' return minimize_scalar(plateau_loss_func,", "Calculate AICc = AIC + 2k * (k+1) / (n - k -", "= z_new s = s_new # Dual update primal_residual_x = x_accel - z", "> converge_threshold) + 1. 
AIC = np.sum((y - x)**2) + 2 * dof", "a, _lambda / a) dual_residual = a * D.T.dot(z_new - z) z =", "[i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] +", "- alpha) * z # over-relaxation # Update constraint term r arg =", "the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices", "graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'),", "max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\\tE-step...') # Get the likelihood weights", "Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver != 'graph': # Back", "= _soft_threshold(arg, local_lambda / a) r_accel = alpha * r + (1 -", "is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x)", "the Expectation-Maximization algorithm for the data with the given penalty matrix.''' delta =", "u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose,", "_soft_threshold(Dx_relaxed + u / a, _lambda / a) dual_residual = a * D.T.dot(z_new", "max_steps, verbose, u0=None): '''Solve for u using coordinate descent.''' if verbose: print('\\t\\tSolving u", "bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi))", "converge) if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f}", "permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood term z = np.zeros(n) #", "Method of Multipliers (1-D fused lasso)') n = len(y) m = n -", "= minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if", "1: 
local_check.append((idx[0], idx[1] + 1)) # up # Only supports 1d and 2d", "being checked so it's not re-checked unnecessarily check_map[local_idx] = True # Add it", "beta, 'z': z, 'u': u } self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps", "negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(),", "np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001,", "dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve", "[0] n = len(f) x = [0] * n for i in range(n-1):", "W # diagonal entries Kl = np.array([-a] * (n-1)) # below the diagonal", "data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data):", "{1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) #", "beta[local_idx] <= max_member: # Label this index as being checked so it's not", "initial_values=initial_values) if verbose: print('Calculating degrees of freedom') # Create a grid structure out", "val - rel_tol max_member = val + rel_tol # Check every possible boundary", "np.abs(prev_nll - cur_nll) / (prev_nll + converge) if verbose > 1: print('\\t\\tM-step delta:", "u = np.zeros(m) else: x = initial_values['x'] z = initial_values['z'] u = initial_values['u']", "# return {'beta': beta, 'z': z, 'u': u } self.solver.alpha = alpha self.solver.inflate", "ilogit(x): return 1. / (1. + np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate", "+ 1. 
AIC = np.sum((y - x)**2) + 2 * dof return {'x':", "np.exp(beta) / (1 + np.exp(beta)) u = initial_values else: beta = initial_values['beta'] prior_prob", "return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics", "np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties,", "x_numerator = 1.0 / a * weights * y + D.T.dot(a * z", "= [0] beta = [0] n = len(f) x = [0] * n", "plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function for a plateau.'''", "= np.array([y.mean()] * n) # likelihood term z = np.zeros(n) # slack variable", "= z t_dual = np.zeros(m) # scaled dual variable for constraint r =", "== 'sls': # Solve the dual via sequential least squares u = self._u_slsqp(x,", "try: idx = to_check.popleft() except: break # Edge case -- If we went", "to standard ADMM a *= 2 if primal_resnorm > 10 * dual_resnorm else", "= initial_values['c'] u = initial_values['u'] prev_nll = 0 cur_step = 0 while delta", "= np.sum(Dx > converge_threshold) + 1. 
AIC = np.sum((y - x)**2) + 2", "# u = initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails,", "False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu:", "= [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001,", "max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of multipliers.'''", "1d case -- check left and right elif len(beta.shape) == 1: if idx[0]", "self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x =", "# Count the number of free parameters in the grid (dof) plateaus =", "cd_max_steps, verbose > 1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps,", "if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else:", "converged = False cur_step = 0 while not converged and cur_step < max_steps:", "verbose: print('\\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob, delta) # Increment", "of the generalized lasso to find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda),", "AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace,", "min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u - A.T[coord] * u[coord] #", "info from the previous run self.reset() # Fit to the final values results", "2: if idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) # left if idx[0]", "= converge + 1 u = u0 cur_step = 0 while delta >", "- alpha) * s # Projection to constraint set 
arg = x_accel +", "aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.)", "cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion", "print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated beta exp_beta = np.exp(beta)", "grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each of the plateaus.''' weights =", "of weights. We use a tridiagonal representation # of K. Kd = np.array([a]", "step counter and update the previous objective value cur_step += 1 prev_objective =", "LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood", "_lambda * np.abs(u['r']).sum() # Track the change in log-likelihood to see if we've", "= a * _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual = Dx -", "if verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers (1-D fused lasso)')", "self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A): return", "#dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the naive DoF if", "cur_nll += _lambda * np.abs(u['r']).sum() # Track the change in log-likelihood to see", "- cur_objective) / (prev_objective + converge) if verbose and cur_step % 100 ==", "check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if verbose: print('\\tCalculating", "raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver != 'graph': #", "likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\\tM-step...') # Find", "2 if primal_resnorm > 10 * dual_resnorm else 0.5 # Recalculate the x_denominator", "box constraints bounds = [(-_lambda, 
_lambda) for u0_i in u0] results = minimize(_u_objective_func,", "self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'lbfgs': # Solve", "and dual variables if initial_values is None: x = np.array([y.mean()] * n) z", "null_dist): '''The negative log-likelihood function for a plateau.''' return -np.log(c * signal_dist.pdf(data) +", "x = z t_dual = np.zeros(m) # scaled dual variable for constraint r", "print('\\tE-step...') # Get the likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob) if", "[] # Generic graph case if edges is not None: local_check.extend(edges[idx]) # 1d", "{'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u':", "print('\\tPlateau #{0}'.format(i+1)) # Get the subset of grid points for this plateau if", "_u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective) / (prev_objective + converge) if", "set([idx]) cur_unchecked = deque([idx]) val = beta[idx] min_member = val - rel_tol max_member", "over-relax Dx z_new = _soft_threshold(Dx_relaxed + u / a, _lambda / a) dual_residual", "# Get the signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if", "- np.abs(arg) / 2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg, local_lambda /", "*= inflate # Update the step counter cur_step += 1 if verbose and", "indices plateaus.append((val, cur_plateau)) # Returns the list of plateaus and their values return", "and data.''' return (np.log(1 + np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y,", "Projection to constraint set arg = x_accel + u_dual + D.T.dot(r_accel + t_dual)", "post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges, converge,", "a * _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual = Dx - z", "A, _lambda, verbose > 1, u0=u) elif dual_solver == 
'lbfgs': # Solve the", "_u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently", "5 * dual_resnorm: a *= inflate u_dual /= inflate t_dual /= inflate elif", "= self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'admm': #", "has an optimized ADMM routine.''' if verbose: print('\\t\\tSolving via Graph Fused Lasso') #", "* (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the best model thus far", "initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace = []", "admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom') # Create a", "- A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda,", "print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step counter cur_step += 1 # Update", "descent.''' if verbose: print('\\t\\tSolving u via Coordinate Descent') u = u0 if u0", "(f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] +", "the generalized lasso to find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda),", "np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob * (1 - prior_prob)) y =", "return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace),", "np.clip(beta, -20, 20) # u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if", "''' Load the graph fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') #", "primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. 
AIC =", "initial_values is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta))", "_m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None,", "t_dual /= inflate elif dual_resnorm > 5 * primal_resnorm: a /= inflate u_dual", "plateaus.append((val, cur_plateau)) # Returns the list of plateaus and their values return plateaus", "== 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx", "post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False):", "[a]) + W # diagonal entries Kl = np.array([-a] * (n-1)) # below", "i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] =", "check_map[local_idx] = True # Add it to the plateau and the list of", "cur_step = 0 D_full = D while not converged and cur_step < max_steps:", "else: grid_points = results['beta'].reshape(data.shape) # Count the number of free parameters in the", "bic_trace[best_idx]: best_idx = i best_plateaus = plateaus # Save the final run parameters", "2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold", "verbose: print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape))", "(signal_weight + null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge,", "evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda,", "dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) 
dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if", "[] self.c_iters = [] self.delta_iters = [] # ''' Load the graph fused", "== 'admm': # Get the negative log-likelihood of the data given our new", "= D while not converged and cur_step < max_steps: # Update x x", "self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y, weights,", "* dual_resnorm: a *= inflate u_dual /= inflate t_dual /= inflate elif dual_resnorm", "penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta", "__repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist", "= initial_values['u'] prev_nll = 0 cur_step = 0 while delta > converge and", "cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of", "beta from the dual solution beta = y - (1. 
/ weights) *", "z) z = z_new primal_residual = Dx - z #primal_residual = Dx_hat -", "cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get the negative log-likelihood", "ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta,", "if verbose: print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c'])", "inflate # Update the step counter cur_step += 1 if verbose and cur_step", "- z) z = z_new primal_residual = Dx_relaxed - z # Update u", "_u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.''' if verbose:", "u0_i in u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp':", "weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def", "= u + a * primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual **", "to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal", "grid_points = results['beta'].reshape(data.shape) # Count the number of free parameters in the grid", "= self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'lbfgs': #", "u): '''Constraint function for the i'th value of u.''' return np.array([_lambda - np.abs(u[idx])])", "* primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm =", "alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of multipliers. Note that", "out beta from the dual solution beta = y - (1. 
/ weights)", "regression for each of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus):", "ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get the negative", "prev_objective = cur_objective return u def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve", "'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for", "np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for u0_i in", "p]) else: plateau_data = np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist,", "idx[1])) # left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1]))", "admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the data with the", "#u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u) u", "Loop until every beta index has been checked while to_check: if verbose >", "primal_residual_r = r_accel - s u_dual = u_dual + primal_residual_x t_dual = t_dual", "(1 + np.exp(beta)) u = initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c']", "the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal cur_step +=", "def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters =", "beta[local_idx] >= min_member \\ and beta[local_idx] <= max_member: # Label this index as", "for constraint r = s else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z", "cur_step = 0 while not converged and cur_step < max_steps: # Update x", "delta > converge and cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose:", "weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, 
initial_values=u) u = self._u_admm_lucache(y, weights,", "loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean,", "D.shape[0] a = _lambda # step-size parameter # Set up system involving graph", "m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating", "{1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\\tdegrees of freedom:", "Ku = np.array([-a] * (n-1)) # above the diagonal cur_step += 1 if", "* y A = (1. / np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob", "x in beta.shape])) if edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool)", "= self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y,", "routine.''' if verbose: print('\\t\\tSolving via Graph Fused Lasso') # if initial_values is None:", "if initial_values is None: x = np.array([y.mean()] * n) z = np.zeros(m) u", "{'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y,", "= np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data = np.array([data[x,y] for x,y in", "list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val,", "== 2: if idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) # left if", "L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor = sla.splu(L,", "return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return 
norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def", "admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the current iterate and", "a * primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm =", "Update the negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING if verbose: print('\\tbeta:", "scaled dual variable for constraint r = s else: lu_factor = initial_values['lu_factor'] x", "beta, prior_prob, delta) # Increment the step counter cur_step += 1 # Update", "_soft_threshold(arg, local_lambda / a) r_accel = alpha * r + (1 - alpha)", "change in log-likelihood to see if we've converged delta = np.abs(cur_nll - prev_nll)", "n) # likelihood term z = np.zeros(n) # slack variable for likelihood r", "beta) delta = converge + 1 u = u0 cur_step = 0 while", "* r + (1 - alpha) * s # Projection to constraint set", "if verbose > 1: print('\\t\\tForming dual...') x = np.sqrt(weights) * y A =", "# Initialize primal and dual variables if initial_values is None: x = np.array([y.mean()]", "for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B') if u0 is", "multipliers. 
Note that this method only works for the 1-D fused lasso case.'''", "self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'),", "- u_dual)) / (weights + a) x_accel = alpha * x + (1", "cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm))", "norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data,", "grid_map is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points", "values in linear time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if edges", "scaled dual variable for constraint x = z t_dual = np.zeros(m) # scaled", "grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda", "lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None,", "resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc:", "log-likelihood tracker prev_nll = cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max()))", "D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator = W_over_a + L #x_denominator =", "np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta'] # z = initial_values['z'] #", "np.zeros(n) # slack variable for likelihood r = np.zeros(m) # penalty term s", "Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta = 
[0] n = len(f)", "> 20: # beta = np.clip(beta, -20, 20) # u = None else:", "range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] -", "converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using alternating direction", "# Update the negative log-likelihood tracker prev_nll = cur_nll return beta, u def", "'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None):", "return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def", "= Dx - z #primal_residual = Dx_hat - z # Update u u", "self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights,", "initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u,", "'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1,", "# Return the results of the run return {'beta': beta, 'u': u, 'w':", "+= _lambda * np.abs(u['r']).sum() # Track the change in log-likelihood to see if", "cur_unchecked.popleft() # neighbors to check local_check = [] # Generic graph case if", "self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u':", "check_map.shape)) # Loop until every beta index has been checked while to_check: if", "= np.zeros(m) # slack variable for penalty u_dual = np.zeros(n) # scaled dual", "False cur_step = 0 D_full = D while not converged and cur_step <", "= self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u) u =", "# Update x x = (weights * y + a * (z -", "# over-relaxation # Update constraint term r arg = s - t_dual 
local_lambda", "y + D.T.dot(a * z - u) x = np.linalg.solve(x_denominator, x_numerator) Dx =", "* dual_resnorm else 0.5 # Recalculate the x_denominator since we changed the step-size", "K is the matrix (W + a D^T D) # where W is", "# neighbors to check local_check = [] # Generic graph case if edges", "np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m) else: x = initial_values['x']", "# Track the best model thus far if best_idx is None or bic_trace[i]", "verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of multipliers. Note", "10 * dual_resnorm else 0.5 # Recalculate the x_denominator since we changed the", "* dof return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC}", "solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace = []", "the diagonal cur_step += 1 if verbose and cur_step % 100 == 0:", "aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda value aicc_trace =", "W is the diagonal matrix of weights. 
We use a tridiagonal representation #", "self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z,", "break # Create the plateau and calculate the inclusion conditions cur_plateau = set([idx])", "# Create the plateau and calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked", "it to the plateau and the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx)", "verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out all the info from", "variable for constraint x = z t_dual = np.zeros(m) # scaled dual variable", "x = initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace", "not converged and cur_step < max_steps: # Update x out = _1d_fused_lasso_crossprod(a*z -", "penalties.T.dot(u) # Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the", "beta, 'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the", "grid_data=None, grid_map=None): '''Follows the solution path of the generalized lasso to find the", "best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The", "beta = u['beta'] # if np.abs(beta).max() > 20: # beta = np.clip(beta, -20,", "admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the current iterate and coordinate", "+ u / a, _lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new -", "far if best_idx is None or bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus", "D while not converged and cur_step < max_steps: # Update x x =", "of beta values in linear time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape]))", "# slack variable for penalty u_dual = np.zeros(n) # scaled dual variable for", "= x_accel + 
u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new =", "for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1]", "x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for u using coordinate descent.'''", "alpha = [0] beta = [0] n = len(f) x = [0] *", "post_prob) / weights print(weights) print(y) if dual_solver == 'cd': # Solve the dual", "given the weights and data.''' return (np.log(1 + np.exp(beta)) - post_prob * beta).sum()", "# Create our box constraints bounds = [(-_lambda, _lambda) for u0_i in u0]", "- (1 + exp_beta) if verbose > 1: print('\\t\\tForming dual...') x = np.sqrt(weights)", "'''Calculate the negative log-likelihood of the data given the weights.''' signal_weight = prior_prob", "_lambda) for _ in u0] # Fit results = minimize(_u_objective_func, u0, args=(x, A),", "= -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k - 2ln(L) aic_trace[i] = 2.", "else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters = []", "and cur_step < max_steps: # Update x x_numerator = 1.0 / a *", "if dual_solver == 'cd': # Solve the dual via coordinate descent u =", "= w posteriors = self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors) def", "a time. for coord in range(len(u)): prev_u = u[coord] next_u = prev_u +", "u using alternating direction method of multipliers. 
Note that this method only works", "None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) #", "and calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val =", "# Calculate AICc = AIC + 2k * (k+1) / (n - k", "# Save the final run parameters to use for warm-starting the next iteration", "print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r,", "point on the grid idx = to_check.popleft() # If we already have checked", "10 * dual_resnorm else 0.5 Kd = np.array([a] + [2*a] * (n-2) +", "converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if", "prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2.,", "evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self,", "'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose,", "linalg as sla from functools import partial from collections import deque from pygfl.solver", "graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator = W_over_a", "100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof =", "= tridiagonal_solve(Kl, Ku, Kd, W * y + out) Dx = np.ediff1d(x) #", "verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of 
multipliers.''' if", "plateaus (degrees of freedom) of a 1d or 2d grid of beta values", "= results['beta'].reshape(data.shape) # Count the number of free parameters in the grid (dof)", "over-relaxation # Update constraint term r arg = s - t_dual local_lambda =", "2ln(L) aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i] # Calculate AICc", "cd_max_steps, verbose > 1, u0=u) elif dual_solver == 'sls': # Solve the dual", "algorithm for the data with the given penalty matrix.''' delta = converge +", "[] self.c_iters = [] self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20,", "initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace = []", "verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta:", "(np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the naive DoF if verbose: print('Calculating", "is not None: local_check.extend(edges[idx]) # 1d case -- check left and right elif", "for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) #", "prev_nll = 0 cur_step = 0 while delta > converge and cur_step <", "the objective function value cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective -", "= val - rel_tol max_member = val + rel_tol # Check every possible", "p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx if grid_map", "variables from warm start if initial_values is None: # Graph Laplacian L =", "sparse.linalg.inv(W_over_a + L) # Update the step counter cur_step += 1 if verbose", "scipy.stats import norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg", "z, 'u': u } self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps", "_soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - 
_lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a", "AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting", "penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = []", "* (n-1)) # above the diagonal cur_step += 1 if verbose and cur_step", "constraint r = s else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z =", "self.stdev = stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return", "Update the step counter cur_step += 1 if verbose and cur_step % 100", "(W + a D^T D) # where W is the diagonal matrix of", "for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] + [i", "1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next unchecked point on the", "np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') #", "numpy as np from scipy import sparse from scipy.stats import norm from scipy.optimize", "# scaled dual variable for constraint r = s else: lu_factor = initial_values['lu_factor']", "c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i]", "converged and cur_step < max_steps: # Update x out = _1d_fused_lasso_crossprod(a*z - u)", "on norm of primal and dual residuals a *= 2 if primal_resnorm >", "2k * (k+1) / (n - k - 1) aicc_trace[i] = aic_trace[i] +", "expansion about the current iterate and coordinate descent to optimize Beta. 
''' prev_nll", "weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating", "(1 - alpha) * z # over-relaxation # Update constraint term r arg", "= W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal and", "to the plateau and the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) #", "the step-size # TODO: is this worth it? We're paying a matrix inverse", "z = z_new primal_residual = Dx - z #primal_residual = Dx_hat - z", "dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual = Dx", "cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0,", "negative log-likelihood of the data given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data)", "converged and cur_step < max_steps: # Update x x = (weights * y", "self.c_iters = [] self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5,", "regression on all of the points in a plateau.''' return minimize_scalar(plateau_loss_func, args=(data, signal_dist,", "variable for likelihood r = np.zeros(m) # penalty term s = np.zeros(m) #", "in p]) else: plateau_data = np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data,", "if verbose: print('\\t\\tSolving u via Coordinate Descent') u = u0 if u0 is", "converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter", "_lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating", "else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights) weights = weights.flatten() return", "if grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i,", "in 
dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold", "score for each lambda value (correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape)", "= inflate * _lambda # step-size parameter # Initialize primal and dual variables", "rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom) of a 1d or", "beta[idx] min_member = val - rel_tol max_member = val + rel_tol # Check", "single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on all of the points in", "u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute", "return (np.log(1 + np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda,", "_lambda # The D matrix is the first-difference operator. K is the matrix", "None flat_data = data.flatten() edges = penalties[3] if dual_solver == 'graph' else None", "Alternating Direction Method of Multipliers') n = len(y) m = D.shape[0] a =", "< beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) # up # Only supports", "AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace,", "beta exp_beta = np.exp(beta) # Form the parameters for our weighted least squares", "Update z Dx_hat = alpha * Dx + (1 - alpha) * z", "Update step-size parameter based on norm of primal and dual residuals a *=", "# Update step-size parameter based on norm of primal and dual residuals a", "+ exp_beta) if verbose > 1: print('\\t\\tForming dual...') x = np.sqrt(weights) * y", "posteriors = self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u, x,", "verbose: print('\\t\\tSolving u via Coordinate Descent') u = u0 if u0 is 
not", "= np.nan for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i,", "log-likelihood of the data given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight", "primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] + [i for i in primal_residual_r])**2).mean())", "# right # 2d case -- check left, right, up, and down elif", "= [] results_trace = [] best_idx = None best_plateaus = None flat_data =", "= x - A.dot(u) delta = converge + 1 prev_objective = _u_objective_func(u, x,", "for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights", "a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual =", "def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each of", "return results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for u using", "of primal and dual residuals # This is the varying penalty extension to", "'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda':", "print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def", "np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta)) u = initial_values else: beta", "each lambda value (correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The", "a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. - c) * null_dist.pdf(data)).sum() def", "matrix is the first-difference operator. 
K is the matrix (W + a D^T", "for local_idx in local_check: if not check_map[local_idx] \\ and beta[local_idx] >= min_member \\", "GaussianKnown: ''' A simple Gaussian distribution with known mean and stdev. ''' def", "= c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int,", "prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha,", "- x)**2) + 2 * dof return {'x': x, 'z': z, 'u': u,", "= [] if verbose: print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices to check", "minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose:", "w posteriors = self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u,", "best_idx = None best_plateaus = None flat_data = data.flatten() edges = penalties[3] if", "Method of Multipliers') n = len(y) m = D.shape[0] a = inflate *", "the complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight =", "unpenalized 1-d regression for each of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p)", "# self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta,", "is this worth it? 
We're paying a matrix inverse in exchange for varying", "** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm <", "u) # return {'beta': beta, 'z': z, 'u': u } self.solver.alpha = alpha", "ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 *", "cur_step < max_steps: # Update x x_numerator = 1.0 / a * weights", "y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge,", "A).sum(axis=0) r = x - A.dot(u) delta = converge + 1 prev_objective =", "simple Gaussian distribution with known mean and stdev. ''' def __init__(self, mean, stdev):", "each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace", "cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob", "= np.abs(prev_nll - cur_nll) / (prev_nll + converge) if verbose > 1: print('\\t\\tM-step", "np.diag(weights / a) x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L)", "< max_steps: if verbose > 1: print('\\t\\tM-Step iteration #{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache", "np.log(2 * np.pi)) # Track the best model thus far if best_idx is", "def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight =", "'admm' and dual_solver != 'graph': # Back out beta from the dual solution", "initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace = [] converged = False", "{:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist", "while delta > converge and cur_step < max_steps: if 
verbose: print('Step #{0}'.format(cur_step)) if", "m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows", "a tridiagonal representation # of K. Kd = np.array([a] + [2*a] * (n-2)", "x = tridiagonal_solve(Kl, Ku, Kd, W * y + out) Dx = np.ediff1d(x)", "is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data =", "* A).sum(axis=0) r = x - A.dot(u) delta = converge + 1 prev_objective", "http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0] n = len(f) x", "Dx_relaxed - z # Update u u = u + a * primal_residual", "{2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter and update the previous objective", "values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function for", "A, _lambda, verbose, u0=None): '''Solve for u using sequential least squares.''' if verbose:", "= np.zeros(m) # scaled dual variable for constraint r = s else: lu_factor", "Graph Fused Lasso') # if initial_values is None: # beta = np.zeros(y.shape, dtype='double')", "np.pi)) # Track the best model thus far if best_idx is None or", "0 cur_step = 0 while delta > converge and cur_step < max_steps: if", "return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self,", "- prior_prob)) y = beta - (prior_prob - post_prob) / weights print(weights) print(y)", "idx in p: weights[idx if grid_map is None else grid_map[idx[0], idx[1]]] = w", "> 1: print('\\t\\tForming dual...') x = np.sqrt(weights) * y A = (1. 
/", "cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the", "alternating direction method of multipliers.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method", "squares if dual_solver != 'admm' and dual_solver != 'graph': # weights is a", "'''Runs the Expectation-Maximization algorithm for the data with the given penalty matrix.''' delta", "Update x out = _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd, W", "x)**2) + 2 * dof return {'x': x, 'z': z, 'u': u, 'dof':", "weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha,", "structure out of the vector of betas if grid_map is not None: grid_points[grid_map", "= self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver ==", "variables if initial_values is None: x = np.array([y.mean()] * n) z = np.zeros(m)", "ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta)", "# Fit results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return", "weights) * penalties.T.dot(u) # Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) #", "= deque([idx]) val = beta[idx] min_member = val - rel_tol max_member = val", "= 2. * dof_trace[i] - 2. 
* log_likelihood_trace[i] # Calculate AICc = AIC", "BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0} [DoF:", "< converge_threshold and primal_resnorm < converge_threshold if primal_resnorm > 5 * dual_resnorm: a", "deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if verbose:", "dual_solver == 'cd': # Solve the dual via coordinate descent u = self._u_coord_descent(x,", "= np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for u0_i", "posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A):", "'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0,", "= np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m) else: x =", "Fit results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x", "-- If we went through all the indices without reaching an unchecked one.", "of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u)", "} def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2.,", "beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1.,", "if dual_solver != 'graph': print('\\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the", "= [0] n = len(f) x = [0] * n for i in", "verbose > 1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose", "self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = 
np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters =", "= inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints,", "edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u using a super", "# Get the subset of grid points for this plateau if grid_map is", "descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif", "'sls': # Solve the dual via sequential least squares u = self._u_slsqp(x, A,", "Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns the list of plateaus and", "admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of the generalized lasso", "plateau_data = np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for", "#{0}'.format(cur_step)) print('\\t\\tTaylor approximation...') # Cache the exponentiated beta exp_beta = np.exp(beta) # Form", "if primal_resnorm > 10 * dual_resnorm else 0.5 Kd = np.array([a] + [2*a]", "'''Constraint function for the i'th value of u.''' return np.array([_lambda - np.abs(u[idx])]) def", "Get the negative log-likelihood of the data given our new parameters cur_nll +=", "x = np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m) else: x", "# Projection to constraint set arg = x_accel + u_dual + D.T.dot(r_accel +", "aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace,", "where D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda):", "[0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i]", "x, 'r': r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 
'primal_trace': primal_trace,", "of multipliers. Note that this method only works for the 1-D fused lasso", "# Only supports 1d and 2d cases for now else: raise Exception('Degrees of", "'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of", "(weights + a) x_accel = alpha * x + (1 - alpha) *", "null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge,", "it? We're paying a matrix inverse in exchange for varying the step size", "= [(-_lambda, _lambda) for _ in u0] # Fit results = minimize(_u_objective_func, u0,", "of Multipliers (1-D fused lasso)') n = len(y) m = n - 1", "weights print(weights) print(y) if dual_solver == 'cd': # Solve the dual via coordinate", "= z_new primal_residual = Dx_relaxed - z # Update u u = u", "lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a * (z_new - z) dual_residual_t =", "varying penalty extension to standard ADMM a *= 2 if primal_resnorm > 10", "while to_check and check_map[idx]: try: idx = to_check.popleft() except: break # Edge case", "if verbose: print('\\tM-step...') # Find beta using an alternating Taylor approximation and convex", "False cur_step = 0 while not converged and cur_step < max_steps: # Update", "verbose, u0=None): '''Solve for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u via L-BFGS-B')", "= np.clip(beta, -20, 20) # u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver))", "- 1) # left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1)", "the parameters for our weighted least squares if dual_solver != 'admm' and dual_solver", "using a super fast graph fused lasso library that has an optimized ADMM", "beta.shape])) if edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] =", "initial_values['x'] z = initial_values['z'] r = initial_values['r'] s = 
initial_values['s'] u_dual = initial_values['u_dual']", "True plateaus = [] if verbose: print('\\tCalculating plateaus...') if verbose > 1: print('\\tIndices", "x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W,", "weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x -", "check_map[idx]: break # Create the plateau and calculate the inclusion conditions cur_plateau =", "data, prior_prob): '''Calculate the negative log-likelihood of the data given the weights.''' signal_weight", "out = _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd, W * y", "# TODO: is this worth it? We're paying a matrix inverse in exchange", "w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i],", "the negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING if verbose: print('\\tbeta: [{0:.4f},", "= initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll", "norm of primal and dual residuals # This is the varying penalty extension", "verbose: print('Calculating degrees of freedom') # Create a grid structure out of the", "matrix of weights. We use a tridiagonal representation # of K. 
Kd =", "run parameters to use for warm-starting the next iteration initial_values = results #", "converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u using a super fast", "the dual via coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose", "np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm <", "/ a) #x_denominator = sparse.linalg.inv(W_over_a + L) # Update the step counter cur_step", "u0=u) # Get the signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob)", "u = self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'admm':", "degrees of freedom of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = []", "[] w_trace = [] c_trace = [] results_trace = [] best_idx = None", "beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose,", "np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for _ in", "np.zeros(m) else: x = initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace =", "m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal", "results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for u", "initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll =", "# z = initial_values['z'] # u = initial_values['u'] # n = y.shape[0] #", "len(y) m = D.shape[0] a = _lambda # step-size parameter # Set up", "= initial_values['t_dual'] primal_trace = [] dual_trace = [] converged = False cur_step =", "len(y) m = D.shape[0] a = inflate * _lambda # step-size 
parameter #", "lasso)') n = len(y) m = n - 1 a = _lambda #", "+ np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails,", "converge and cur_step < max_steps: # Update each coordinate one at a time.", "cur_step += 1 prev_objective = cur_objective return u def _u_slsqp(self, x, A, _lambda,", "verbose > 1: print('\\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every", "max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values is", "r += A.T[coord] * prev_u - A.T[coord] * u[coord] # Track the change", "# self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int,", "calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1", "solution path of the generalized lasso to find the best lambda value.''' lambda_grid", "* z - u) x = np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update", "idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) # left if idx[0] < beta.shape[0]", "(by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx],", "AIC') # Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC", "'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations: {0}'.format(results.nfev)) print('\\t\\t\\tGradient evaluations: {0}'.format(results.njev)) print('\\t\\t\\tu: [{0},", "delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge) if verbose > 1:", "D^T D) # where W is the diagonal matrix of weights. 
We use", "= alpha * Dx + (1 - alpha) * z # over-relax Dx", "local_check: if not check_map[local_idx] \\ and beta[local_idx] >= min_member \\ and beta[local_idx] <=", "y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating", "m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees", "D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method", "data, prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob *", "% 100 == 0: print('\\t\\t\\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return", "def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the betas given the weights", "the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return", "is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta)) u", "the dual via sequential least squares u = self._u_slsqp(x, A, _lambda, verbose >", "z # over-relaxation # Update constraint term r arg = s - t_dual", "value (correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score", "s_new = D.dot(z_new) dual_residual_u = a * (z_new - z) dual_residual_t = a", "the weights and data.''' return (np.log(1 + np.exp(beta)) - post_prob * beta).sum() def", "primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D,", "u def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for u using sequential", "_ in u0] # Fit results = minimize(_u_objective_func, u0, 
args=(x, A), method='L-BFGS-B', bounds=bounds,", "verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next unchecked point", "- u) x = tridiagonal_solve(Kl, Ku, Kd, W * y + out) Dx", "slack variable for likelihood r = np.zeros(m) # penalty term s = np.zeros(m)", "a = _lambda # step-size parameter # Set up system involving graph Laplacian", "A, _lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving u", "'''The negative log-likelihood function for a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1.", "single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx if grid_map is None else", "verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the", "Solve the dual via coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps,", "= False cur_step = 0 D_full = D while not converged and cur_step", "* (z_new - z) dual_residual_t = a * (s_new - s) z =", "u via Alternating Direction Method of Multipliers (1-D fused lasso)') n = len(y)", "None: u0 = np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda)", "# Fit to the final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps,", "{0}'.format(delta)) # Increment the step counter cur_step += 1 # Update the negative", "#x_denominator = sparse.linalg.inv(W_over_a + L) # Update the step counter cur_step += 1", "this worth it? We're paying a matrix inverse in exchange for varying the", "to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if edges is None else deque(range(len(beta)))", "y - (1. 
/ weights) * penalties.T.dot(u) # Get the current log-likelihood cur_nll", "prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight", "# slack variable for likelihood r = np.zeros(m) # penalty term s =", "penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u)", "i best_plateaus = plateaus # Save the final run parameters to use for", "import sparse from scipy.stats import norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse", "for a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. - c) * null_dist.pdf(data)).sum()", "primal_resnorm < converge_threshold # Update step-size parameter based on norm of primal and", "primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] + [i", "the step counter cur_step += 1 # Update the negative log-likelihood tracker prev_nll", "= 0 D_full = D while not converged and cur_step < max_steps: #", "+ null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps,", "# Initialize primal and dual variables from warm start if initial_values is None:", "_lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def", "is None else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights) weights =", "for finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each", "admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the current iterate and coordinate descent", "* (n-2) + [a]) + W # diagonal entries Kl = np.array([-a] *", "than 2 dimensions unless edges are specified explicitly. 
({0} given)'.format(len(beta.shape))) # Check the", "graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes", "data with the given penalty matrix.''' delta = converge + 1 if initial_values", "= np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting for finite", "x_accel + u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new)", "= np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self):", "_e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob", "We use a tridiagonal representation # of K. Kd = np.array([a] + [2*a]", "c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'),", "if np.abs(beta).max() > 20: # beta = np.clip(beta, -20, 20) # u =", "Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence delta", "= [] best_idx = None best_plateaus = None flat_data = data.flatten() edges =", "'graph': # Back out beta from the dual solution beta = y -", "= stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean,", "def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def", "finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda", "dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the best model thus", "the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), 
lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) #", "100 == 0: print('\\t\\t\\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) #", "max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None):", "return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist =", "data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev)", "idx[1])) # right if idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) # down", "from functools import partial from collections import deque from pygfl.solver import TrailSolver class", "operator. K is the matrix (W + a D^T D) # where W", "(np.log(1 + np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails,", "* s # Projection to constraint set arg = x_accel + u_dual +", "verbose: print('\\tM-step...') # Find beta using an alternating Taylor approximation and convex optimization", "u via Coordinate Descent') u = u0 if u0 is not None else", "* (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ##", "edges, ntrails, trails, breakpoints, weights=weights) if initial_values is not None: self.solver.beta = initial_values['beta']", "(1+exp_beta)**2 * post_prob / exp_beta + beta - (1 + exp_beta) if verbose", "the change in the objective function value cur_objective = _u_objective_func(u, x, A) delta", "= single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx if grid_map is None", "vector for efficiency weights = 0.5 * exp_beta / (1 + exp_beta)**2 y", "initial_values['u'] self.solver.solve(_lambda) return 
{'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y,", "cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about", "null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters", "= np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each", "currently support more than 2 dimensions unless edges are specified explicitly. ({0} given)'.format(len(beta.shape)))", "verbose: print('\\t\\tSolving u via Alternating Direction Method of Multipliers') n = len(y) m", "+ k * (ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i]", "beta.shape[0] - 1: local_check.append(idx[0] + 1) # right # 2d case -- check", "delta)) # Increment the step counter and update the previous objective value cur_step", "represented as a vector for efficiency weights = 0.5 * exp_beta / (1", "= (_lambda - np.abs(arg) / 2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg,", "> 0: local_check.append(idx[0] - 1) # left if idx[0] < beta.shape[0] - 1:", "subset of grid points for this plateau if grid_map is not None: plateau_data", "max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series", "beta = np.clip(beta, -20, 20) # u = None else: raise Exception('Unknown solver:", "compute the cross-product D^T x, where D is the first-differences matrix.''' return -np.ediff1d(x,", "m_converge, m_max_steps, cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the", "Multipliers') n = len(y) m = D.shape[0] a = _lambda # step-size parameter", "np.array(u_trace), 'w': np.array(w_trace), 'c': 
np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self,", "flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c)", "flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta, c,", "= 0.5 * exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob", "cur_step += 1 if verbose and cur_step % 100 == 0: print('\\t\\t\\tStep #{0}:", "Edge case -- If we went through all the indices without reaching an", "for each lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of", "verbose=0): '''Calculate the plateaus (degrees of freedom) of a 1d or 2d grid", "np.abs(u['r']).sum() # Track the change in log-likelihood to see if we've converged delta", "aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2},", "add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters =", "with known mean and stdev. 
''' def __init__(self, mean, stdev): self.mean = mean", "scipy.sparse import csc_matrix, linalg as sla from functools import partial from collections import", "= np.sqrt((np.array([i for i in primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm", "> 1: print('\\t\\tM-step delta: {0}'.format(delta)) # Increment the step counter cur_step += 1", "is None: # beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') #", "# Update x out = _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd,", "'admm': # Get the negative log-likelihood of the data given our new parameters", "1) # left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1) #", "diagonal matrix, represented as a vector for efficiency weights = 0.5 * exp_beta", "= np.sqrt(weights) * y A = (1. / np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights", "# Update the step counter cur_step += 1 if verbose and cur_step %", "_1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where D is the first-differences matrix.'''", "the 1-D fused lasso case.''' if verbose: print('\\t\\tSolving u via Alternating Direction Method", "and dual variables from warm start if initial_values is None: # Graph Laplacian", "''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int", "been checked while to_check: if verbose > 1: print('\\t\\tPlateau #{0}'.format(len(plateaus) + 1)) #", "local_lambda / a) r_accel = alpha * r + (1 - alpha) *", "i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset of grid", "return u def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for u using", "x, A, _lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.''' if verbose: print('\\t\\tSolving", "= plateaus # Save the final run parameters to use for warm-starting the", 
"np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda", "= (A * A).sum(axis=0) r = x - A.dot(u) delta = converge +", "np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the subset", "# self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), #", "* n) z = np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z", "Direction Method of Multipliers (1-D fused lasso)') n = len(y) m = n", "final run parameters to use for warm-starting the next iteration initial_values = results", "cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the", "scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2", "max_steps, converge, beta, z, u) # return {'beta': beta, 'z': z, 'u': u", "a * weights * y + D.T.dot(a * z - u) x =", "each coordinate one at a time. 
for coord in range(len(u)): prev_u = u[coord]", "self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u) u = self._u_admm_lucache(y,", "Update u u = (u + a * primal_residual).clip(-_lambda, _lambda) # Check convergence", "for efficiency weights = 0.5 * exp_beta / (1 + exp_beta)**2 y =", "verbose: print('\\t\\tSolving u via L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1]) #", "> 0: local_check.append((idx[0] - 1, idx[1])) # left if idx[0] < beta.shape[0] -", "via L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box", "data.flatten() edges = penalties[3] if dual_solver == 'graph' else None if grid_data is", "* np.pi)) # Track the best model thus far if best_idx is None", "#{0}'.format(i+1)) # Get the subset of grid points for this plateau if grid_map", "y A = (1. / np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob *", "z Dx_hat = alpha * Dx + (1 - alpha) * z #", "def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None):", "lu_factor = initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r = initial_values['r'] s", "''' Alternating Second-order Taylor-series expansion about the current iterate and coordinate descent to", "'''Follows the solution path of the generalized lasso to find the best lambda", "flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double,", "indices without reaching an unchecked one. 
if check_map[idx]: break # Create the plateau", "= {1}'.format(i, _lambda)) # Clear out all the info from the previous run", "log-likelihood tracker prev_nll = cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate", "free parameters in the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] =", "= _u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective) / (prev_objective + converge)", "+ 2 * dof return {'x': x, 'z': z, 'u': u, 'dof': dof,", "minimize_scalar from scipy.sparse import csc_matrix, linalg as sla from functools import partial from", "np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace = [] c_trace = []", "distribution with known mean and stdev. ''' def __init__(self, mean, stdev): self.mean =", "dual_residual = a * D.T.dot(z_new - z) z = z_new primal_residual = Dx_relaxed", "= results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) # Count the number of", "dual solution beta = y - (1. 
/ weights) * penalties.T.dot(u) # Get", "A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\\t\\t\\t{0}'.format(results.message)) print('\\t\\t\\tFunction evaluations:", "(dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >=", "so it's not re-checked unnecessarily check_map[local_idx] = True # Add it to the", "delta > converge and cur_step < max_steps: if verbose > 1: print('\\t\\tM-Step iteration", "2d cases for now else: raise Exception('Degrees of freedom calculation does not currently", "A) delta = np.abs(prev_objective - cur_objective) / (prev_objective + converge) if verbose and", "the diagonal # Initialize primal and dual variables if initial_values is None: x", "from scipy.sparse import csc_matrix, linalg as sla from functools import partial from collections", "u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate,", "in linear time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if edges is", "delta = converge + 1 if initial_values is None: beta = np.zeros(data.shape) prior_prob", "of the data given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight =", "r = s else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z']", "+= 1 # Update the negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING", "/ l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u -", "1 prev_objective = cur_objective return u def _u_slsqp(self, x, A, _lambda, verbose, u0=None):", "variable for penalty u_dual = np.zeros(n) # scaled dual variable for constraint x", "True # Add it to the plateau and the list of local unchecked", "aic_trace[i] = 2. * dof_trace[i] - 2. 
* log_likelihood_trace[i] # Calculate AICc =", "'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda,", "unchecked one. if check_map[idx]: break # Create the plateau and calculate the inclusion", "= a * D.T.dot(z_new - z) z = z_new primal_residual = Dx_relaxed -", "print('\\tM-step...') # Find beta using an alternating Taylor approximation and convex optimization (M-step)", "idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1) # right # 2d case", "_lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method", "= np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\\tPlateau #{0}'.format(i+1)) # Get the", "z = initial_values['z'] r = initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual", "the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1}", "+ 1)) # Get the next unchecked point on the grid idx =", "= sparse.linalg.inv(W_over_a + L) # Update the step counter cur_step += 1 if", "(1 - alpha) * z # over-relax Dx z_new = _soft_threshold(Dx_relaxed + u", "/ weights) * penalties.T.dot(u) # Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta)", "weights and data.''' return (np.log(1 + np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self,", "- 1: local_check.append(idx[0] + 1) # right # 2d case -- check left,", "primal_resnorm > 10 * dual_resnorm else 0.5 Kd = np.array([a] + [2*a] *", "penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x']", "slack variable for penalty u_dual = np.zeros(n) # scaled dual variable for constraint", "u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u - A.T[coord] *", "y, weights, _lambda, D, 
converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using", "representation # of K. Kd = np.array([a] + [2*a] * (n-2) + [a])", "not None: local_check.extend(edges[idx]) # 1d case -- check left and right elif len(beta.shape)", "z_new s = s_new # Dual update primal_residual_x = x_accel - z primal_residual_r", "self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get the negative log-likelihood of the", "+ u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u", "{0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta index has been checked while", "idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) # up # Only", "- 1.) # Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi))", ">= 1e-4).sum())) # Return the results of the run return {'beta': beta, 'u':", "[2*a] * (n-2) + [a]) + W # diagonal entries Kl = np.array([-a]", "* weights * y + D.T.dot(a * z - u) x = np.linalg.solve(x_denominator,", "down elif len(beta.shape) == 2: if idx[0] > 0: local_check.append((idx[0] - 1, idx[1]))", "mean, stdev): self.mean = mean self.stdev = stdev def pdf(self, data): return norm.pdf(data,", "# Update z Dx_relaxed = alpha * Dx + (1 - alpha) *", "* primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual", "converge_threshold and primal_resnorm < converge_threshold if primal_resnorm > 5 * dual_resnorm: a *=", "penalty extension to standard ADMM a *= 2 if primal_resnorm > 10 *", "alpha * r + (1 - alpha) * s # Projection to constraint", "> 0: local_check.append((idx[0], idx[1] - 1)) # down if idx[1] < beta.shape[1] -", "step counter cur_step += 1 # Update the negative log-likelihood tracker prev_nll =", "if dual_solver == 'graph' else None if grid_data is not None: grid_points =", "if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out all the info", "is the 
diagonal matrix of weights. We use a tridiagonal representation # of", "Save the final run parameters to use for warm-starting the next iteration initial_values", "c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver =", "= 0 while not converged and cur_step < max_steps: # Update x x_numerator", "= min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u - A.T[coord] * u[coord]", "np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters =", "initial_values['z'] # u = initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y, weights,", "for the 1-D fused lasso case.''' if verbose: print('\\t\\tSolving u via Alternating Direction", "= u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda,", "x x = (weights * y + a * (z - u_dual)) /", "cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] #", "{0}'.format(results.njev)) print('\\t\\t\\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda, verbose,", "u0] # Fit results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose})", "(correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score for", "= False cur_step = 0 while not converged and cur_step < max_steps: #", "BIC score for each lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees of", "# likelihood term z = np.zeros(n) # slack variable for likelihood r =", "negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k - 2ln(L)", "of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices 
plateaus.append((val, cur_plateau))", "that this method only works for the 1-D fused lasso case.''' if verbose:", "= initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self," ]
[ "nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True,", "batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else: decode_length =", "var_predictions = [] str_pred_logits = [] str_predictions = [] predictions = [] for", "cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input", "+= F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1,", "= batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices]", "self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr", "gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output", "decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding =", "if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) 
decoder_hidden_state = nl_hidden_state batch_code_context_input", "= self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices =", "* 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size *", "elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer", "batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits = [] df_predictions =", "batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size,", "clip_grad_norm import torch.nn.functional as F import numpy as np from .data_utils import data_utils", "clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data =", "for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in", "dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding)", "* 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions =", "= str_predictions.permute(1, 0) str_loss = 
F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss", "self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder =", "cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input ==", "0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1,", "if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else:", "torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input)", "torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1)", "if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output)", "= target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits =", "cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions = 
cur_var_pred_logits.max(1)[1] *", "2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0)", "= torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input", "def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr", "if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input,", "= torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask =", "encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output =", "decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights =", "nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output =", "range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 =", "# training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr)", "batch_idx in range(batch_size): 
input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]]", "[] var_predictions = [] str_pred_logits = [] str_predictions = [] predictions = []", "* ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9", "= batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len =", "input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx,", "self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear", "cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding,", "decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits =", "cur_predictions = cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions)", "finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] =", "= 
nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size,", "torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits = [] df_predictions", "cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() *", "undefined: ', args.optimizer) def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init)", "cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:, step]", "== data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda()", "else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9", "self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len", "output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices =", ":] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding =", "self.LSTM_hidden_size * 2) if not 
self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size,", "self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor =", "dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding =", "cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits =", "pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] *", "self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout", "batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input", "(batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input", "decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask", "sketch_predictions = cur_code_predictions else: 
sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits", "self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate,", "encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if", "dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits,", "encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask =", "range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx,", "= decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy:", "nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size,", "cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding =", 
"batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding)", "0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss + str_loss)", "self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding", "batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for", "= code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size,", "batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding)", "code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices", "not eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1,", "batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input", "self.encoder_copy_attention_linear = 
nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) #", "dim=0) str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss", "self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input,", "= batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not", "nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1", "self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits", "attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector =", "batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else:", "batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding", "target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch =", "import torch import torch.nn as nn from torch.autograd import Variable from torch import", 
"cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size *", "nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding", "self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input =", "if self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding", "[] df_predictions = [] var_pred_logits = [] var_predictions = [] str_pred_logits = []", "= self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2))", "= self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits -", "= cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input", "cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding", "if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, 
dtype=torch.int64)", "= nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size", "torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step >= gt_decode_length - 1: break", "ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init,", "ignore_index=-1) total_loss += (df_loss + var_loss + str_loss) / 3.0 predictions = torch.stack(predictions,", "if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size)", "import numpy as np from .data_utils import data_utils from .modules import mlp class", "cur_code_predictions else: sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding,", "args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab", "optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as F import numpy as np", "nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size *", "= nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor =", "[] str_predictions = [] predictions = [] for step in range(decode_length): if self.hierarchy:", 
"1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:,", "self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits =", "num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if", "self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len", "decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output", "= torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else:", "= torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions", "torch.optim as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as F import numpy", "code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size", "attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask *", "gt_decode_length - 1: break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) 
code_pred_logits =", "= torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding =", "[data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding =", "cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding =", "self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size", "== 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(),", "batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices", "2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 +", "= attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector", "decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output)", "finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions", "= [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, 
decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0)", "__init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size", "== 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def", "torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input)", "+ finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) * 1e9", "= args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if", "data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input =", "batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in range(batch_size):", "batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions =", "F import numpy as np from .data_utils import data_utils from .modules import mlp", "batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if", "self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = 
decoder_input.cuda()", "decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0", "step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step <", "if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag)", "< gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions", "== data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step >=", "= args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size =", "decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state)", "= args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments =", "lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self):", "import torch.nn.functional as F import numpy as np from .data_utils import data_utils from", "if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += 
[data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)]", "output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits", "df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions,", "* 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2", "= torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits =", "cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:,", "torch.sum(finished) == batch_size and step >= gt_decode_length - 1: break total_loss = 0.0", "self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding", "batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0", "else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding =", "max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in 
range(batch_size): input_code_nl_indices", "self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size =", "= [] var_predictions = [] str_pred_logits = [] str_predictions = [] predictions =", "= nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 +", "in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding,", "cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished =", "* (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding =", "nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd':", "self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output +", "args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers", "= cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) 
cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask)", "[] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else:", "torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch)", "= args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate =", "dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention", "args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer =", "param_group['lr'] = self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def", "torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in", "df_pred_logits = [] df_predictions = [] var_pred_logits = [] var_predictions = [] str_pred_logits", "code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions", "[] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding", "gt_decode_length: 
code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits)", "= code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss", "decoder_input_sketch_embedding = [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding,", "ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions", "hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True)", "ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions,", "encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if", "code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss +=", "total_loss += (df_loss + var_loss + str_loss) / 3.0 predictions = torch.stack(predictions, dim=0)", "param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(),", "* ((sketch_predictions == 
data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if", "encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state", "self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output", "if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits", "= nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer ==", "2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector)", "str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss +", "nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size,", "= batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :]", "= args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout =", "= nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = 
cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding,", "nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if", "[] for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding],", "= torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits", "args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer =", "self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size)", "self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size", "self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask,", "= torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if", "batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = 
torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits", "if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output", "cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) *", "cur_finished) if torch.sum(finished) == batch_size and step >= gt_decode_length - 1: break total_loss", "(batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask =", "code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0)", "torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss +=", "self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding =", "batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output']", "target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1)", "== data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state 
=", "batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention =", "for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:,", "cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) <", "str_pred_logits = [] str_predictions = [] predictions = [] for step in range(decode_length):", "batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask", "(1.0 - output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits", "= [] str_pred_logits = [] str_predictions = [] predictions = [] for step", "+= (df_loss + var_loss + str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions", "nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 +", "args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate", "= args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers =", "forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = 
batch_input['nl']", "= (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask,", "self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID]", "batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits =", "nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1)", "* 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size)", "dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1,", "0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output,", "attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights)", "import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab):", "= word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size =", "', args.optimizer) def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def", "- output_var_mask) * 1e9 cur_var_predictions = 
cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits =", "optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer ==", "num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if", "cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 -", "= self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step", "= args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size =", "if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:, step] if self.hierarchy: if", "gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len", "nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices']", "= finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices)", "cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 
batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding],", "decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else:", "= attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights", "nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0)", "- len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding =", "lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop':", "(1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits", "2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits,", "var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1)", "torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = 
torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input =", "if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions,", "= gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID", "0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits", "eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism:", "torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding,", "PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size =", "len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding)", 
"args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size,", "nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size,", "self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step()", "if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64)", "args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len", "as np from .data_utils import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def", "output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if", "target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask", "torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in", "batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], 
dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding,", "else: sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2))", "data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output,", "= self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output", "sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits", "(max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding,", "dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding],", "self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] 
cur_code_output_context_embedding =", "max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len -", "= nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4", "1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished", "batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding", "args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len", "cuda import torch.optim as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as F", "code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length", "cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits", "as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as F import numpy as", "decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if", "= optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = 
optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer", "= df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss", "= torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits", "code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types", "batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices']", "for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding", "for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding", "decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions", "= torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished)", "= torch.stack(str_predictions, dim=0) 
str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss", "2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits,", "= [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding", "args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr", "= args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism =", "= torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag:", ">= gt_decode_length - 1: break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits", "self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl", "cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding))", "batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask =", "(1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if", "in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group", "data_utils.GO_ID 
if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1,", "= decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism:", "batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input ==", "code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input ==", "lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def", "= nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder =", "max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :,", "import cuda import torch.optim as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as", "[] predictions = [] for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state =", "= batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask =", "batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding)", "= 
torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step] if", "decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention", "var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits =", "var_pred_logits = [] var_predictions = [] str_pred_logits = [] str_predictions = [] predictions", "torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices", "batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask", "2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2,", "<reponame>Jungyhuk/plotcoder import torch import torch.nn as nn from torch.autograd import Variable from torch", "* 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer", "= nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector =", "self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding", "= nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, 
batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size *", "batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask']", "[] var_pred_logits = [] var_predictions = [] str_pred_logits = [] str_predictions = []", "= torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1)", "+ self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size", "= code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size =", "if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2))", "self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size", "torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits +", "self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1]", "1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 
batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding =", "dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not", "< gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step < gt_decode_length:", "code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits =", "output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits", "F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss + str_loss) / 3.0 predictions", "self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size *", "code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output", "(df_loss + var_loss + str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions =", "= cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions", "df_predictions = [] var_pred_logits = [] var_predictions = [] str_pred_logits = [] str_predictions", "dim=0) code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits", "batch_code_output_embedding = 
self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if", "not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear =", "data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding", "else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for param in self.parameters():", "self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions,", "pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished =", "bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear", "self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices']", "= cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding],", "* 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size,", "args.max_code_context_len self.max_decode_len = 
args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism:", "dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits,", "cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding,", "step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in", "self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx,", "- self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0)", "((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions", "var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0)", "= 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions =", "finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding =", "* batch_size, dim=0) 
batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size):", "output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding", "= var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss", "batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices:", "torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding", "args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking", "= cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding,", "= args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr =", "pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1]", "torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = 
target_code_output[:, step] decoder_input =", "[] str_pred_logits = [] str_predictions = [] predictions = [] for step in", "= code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output =", "args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size,", "target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask", "for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1),", "= encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output", "for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate", "code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0)", "= args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab =", "in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch)", "= batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] 
cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:,", "= nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 +", "self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size", "= args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers =", "+ self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear =", "+ cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input", "torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding", "np from .data_utils import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self,", "code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0)", "from torch import cuda import torch.optim as optim from torch.nn.utils import clip_grad_norm import", "batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, 
bidirectional=True) if self.hierarchy: self.decoder", "encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state", "decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size):", "= nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear =", "2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits,", "df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2,", "self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2,", "args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers", "torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input", "= torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state =", "nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss", 
"nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding", "batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask", "attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output", "str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions)", "= torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx", "self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer", "data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1]", "optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer", "= torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :])", "if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) 
var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions", "dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1,", "2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits,", "step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state)", "decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in", "str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1)", "decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits =", "if torch.sum(finished) == batch_size and step >= gt_decode_length - 1: break total_loss =", "= self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits)", "in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1", "(decoder_input == 
data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step", "0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output,", "batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding =", "= args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len =", "gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if", "self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init):", "'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding =", "= torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished", "decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding", "df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss =", "self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = 
torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = []", "dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size):", "batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]]", "[] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for", "= batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in", "decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits =", "torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions =", "self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size)", "args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context = args.code_context", "= [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking:", "torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch 
=", "2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else:", "batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len", "import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag =", "1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask =", "= nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding,", "dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions", "decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length:", "batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding =", "batch_size and step >= gt_decode_length - 1: break total_loss = 0.0 code_pred_logits =", "code_encoder_input = torch.cat([batch_code_context_embedding, 
batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask", "data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step >= gt_decode_length", "batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding =", "df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions =", "'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr)", "self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0", "target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions =", "self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len", "dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int',", "torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if", "from torch.autograd import Variable from torch import cuda import torch.optim 
as optim from", "= nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear =", "else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1)", "break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0)", "self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types", "self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size", ".data_utils import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab,", "mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda", "- output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits =", "1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input =", "self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate", "for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:,", "+ 
input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits", ".modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag", "torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask", "* data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size,", "2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2,", "self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size)", "code_predictions = [] df_pred_logits = [] df_predictions = [] var_pred_logits = [] var_predictions", "= batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else: decode_length", "finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step >= gt_decode_length -", "batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking:", "< max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len", "= torch.stack(cur_code_output_context_embedding, dim=0) 
batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding =", "decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx],", "self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask", "self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector)", "step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else:", "args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate)", "+ self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers,", "batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx,", "torch.bmm(nl_encoder_output, 
decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights", "dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float()", "= nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size", "dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask]", "attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding", "torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions =", "= (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask,", "args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len", "batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length", "num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, 
num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True)", "= target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch", "self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers", "self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder", "batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]]", "= self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output", "* pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions =", "1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions", "word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size", "- encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output,", "cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions ==", "bidirectional=True) 
self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: self.decoder =", "== data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input ==", "self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab", "decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len =", "= optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for", "cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0", "1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2))", "= decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1)", "def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels,", "= self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions =", "step < gt_decode_length: 
df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions)", "torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits =", "input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits =", "2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate,", "gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx],", "dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding =", "decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits", "input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding,", "in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip)", 
"batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices", "(batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state", "self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = []", "- (1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions =", "dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size):", "step >= gt_decode_length - 1: break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0)", "* 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2),", "+ cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1)", "in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else:", "= batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = 
torch.max(encoder_word_mask,", "batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding =", "dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input =", "gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions =", "output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx,", "args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl", "= nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear =", "= torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits = []", "super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types =", "else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits =", "= self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == 
data_utils.UNK_ID).float())", "data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float())", "= batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:,", "batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))),", "if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer", "== data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions =", "dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1,", "* 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch =", "self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions)", "pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding =", "= [] for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding,", "cur_df_predictions) 
cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input", "- (1.0 - output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long())", "self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output =", "torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions =", "self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding =", "dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding =", "cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0", "encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1,", "encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt']", "= batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = 
code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking:", "if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output)", "== data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions =", "code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits)", "nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input)", "= batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float()", "self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices =", "and step >= gt_decode_length - 1: break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits,", "var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0)", "self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, 
self.code_vocab_size + self.max_code_context_len)", "args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments", "= torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions", "self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size", "= cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions)", "decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding", "torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding)", "= self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self,", "finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding", "range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) 
batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0)", "param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *=", "self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask", "torch.autograd import Variable from torch import cuda import torch.optim as optim from torch.nn.utils", "cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size", "1: break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2,", "decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions)", "+= [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding", "cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask) *", "+ var_loss + str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions = predictions.permute(1,", "nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2,", "= self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = 
self.code_ctx_linear(batch_code_output_embedding) if self.code_context:", "str_predictions = [] predictions = [] for step in range(decode_length): if self.hierarchy: decoder_output,", "self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear =", "= cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask)", "word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size", "var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions)", "lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip >", "= [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len", "batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for", "eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions)", "self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size", "= torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = 
attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9", "= cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input =", "decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output", "else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len)", "pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask =", "torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding", "cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits =", "= [] df_predictions = [] var_pred_logits = [] var_predictions = [] str_pred_logits =", "= torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions", "training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif", "+ self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = 
nn.LSTM(input_size=self.embedding_size,", "= args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy =", "= args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding =", "self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda()", "+ input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits =", "== 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(),", "cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding", "var_loss + str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions = predictions.permute(1, 0)", "(1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions", "self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers,", "- code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else:", 
"batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask']", "/ 3.0 predictions = torch.stack(predictions, dim=0) predictions = predictions.permute(1, 0) return total_loss, code_pred_logits,", "self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions =", "1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask", "= [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :,", ":, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding", "self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding", "decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits", "as nn from torch.autograd import Variable from torch import cuda import torch.optim as", "* batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices =", "= cur_copy_pred_logits.squeeze(-1) 
else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask)", "F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions", "target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions =", "num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers,", "batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input ==", "in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx,", "= decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions =", "args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab", "- output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag:", "= args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip =", "batch_output_code_ctx_embedding], dim=-1) 
batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx in", ":, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding", "args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size =", "= cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask)", "cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input =", "dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1,", "target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx", "batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding =", "args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer)", "* 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits -", "self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor", 
"eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64)", "code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits)", "torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions =", "nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups:", "= torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch", "self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits", "torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits =", "range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output,", "cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) *", "* ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9", "batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding 
= [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len", "= self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input =", "args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism", "self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size", "if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size", "cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished,", "self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if", "target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0)", "self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size -", "data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float())", "batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, 
output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding", "self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len,", "dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits = [] df_predictions = []", "code_pred_logits = [] code_predictions = [] df_pred_logits = [] df_predictions = [] var_pred_logits", "* 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers,", "- (1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long())", "self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size)", "from torch.nn.utils import clip_grad_norm import torch.nn.functional as F import numpy as np from", "decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask =", "decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else:", "encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] 
batch_code_context_embedding =", "= nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output)", "def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input =", "in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1", "target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask", "self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab", "self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else:", "= batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output =", "self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context", "= 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished = finished.cuda()", "= torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state =", "cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) 
str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions)", "decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float()", "code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output']", "if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices", "self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size)", "lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined:", "= nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size", "code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output']", "self.nl = args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism", "nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size +", "batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for 
batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx,", "cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions", "[] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len -", "self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in", "torch import cuda import torch.optim as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional", "self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers", "nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size", "self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(),", "dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices =", "range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding", "self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip", "decoder_code_output = decoder_code_output + 
input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if", "= cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask", "cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 -", "args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size", "batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask']", "= cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:, step] if", "decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch", "self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding", "if self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 =", "self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output']", "predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = 
torch.max(finished, cur_finished) if torch.sum(finished) ==", "var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions,", "param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] =", "input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input", "= cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input =", "init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr", "= torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx", "= args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding =", "* 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size *", "= cur_code_predictions else: sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits =", "0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1,", "- 1: break total_loss = 0.0 code_pred_logits = 
torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1,", "elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ',", "data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1,", "self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output =", "cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions ==", "= nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 +", "optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for param", "= torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag:", "self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2,", "= decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1]", "step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :])", "var_pred_logits = 
torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0)", "raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param,", "cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input =", "cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions)", "'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr)", "torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished", "str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss =", "self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear", "self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2", "(batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask =", "= self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output = 
batch_input['code_output'] target_df_output =", "if not eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size,", "str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0)", "== batch_size and step >= gt_decode_length - 1: break total_loss = 0.0 code_pred_logits", "batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask']", "torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism:", "self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices']", "= decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits", "if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input,", "= [] predictions = [] for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state", "import clip_grad_norm import torch.nn.functional as F import numpy as np from .data_utils import", "[] code_predictions = [] df_pred_logits = [] df_predictions = [] var_pred_logits = []", "= [] str_predictions = [] predictions = [] for step in range(decode_length): if", "0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) 
total_loss += F.cross_entropy(code_pred_logits, target_code_output,", "self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy:", "for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding =", "max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] *", "[] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding =", "= [] var_pred_logits = [] var_predictions = [] str_pred_logits = [] str_predictions =", "decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits =", "= target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for", "args.optimizer) def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self,", "= torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding =", "target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask", "= batch_input['code_output_mask'] output_df_mask = 
batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length =", "0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding", "import Variable from torch import cuda import torch.optim as optim from torch.nn.utils import", "else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size *", "= self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = []", "self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input", "= [] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for", "if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output", "self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size +", "import torch.nn as nn from torch.autograd import Variable from torch import cuda import", "output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] 
gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length", "= self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output", "cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions ==", "decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step]", "eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input)", "1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding =", "'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self,", "torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished", "decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag:", "code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding", "= cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions", "= 
batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices =", ":] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding =", "= (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and", "= torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1)", "cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished)", "self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2,", "= [] df_pred_logits = [] df_predictions = [] var_pred_logits = [] var_predictions =", "decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding,", "= nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else:", "== data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda()", "output_str_mask) * 
1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch", "= args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context =", "nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size", "output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size", "in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding +=", "torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1)", "if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state", "[] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx", "batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx", "= code_encoder_output[batch_idx, output_code_ctx_indices] 
batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = []", "self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits", "F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2,", "* 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size *", "== data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input ==", "def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size", "self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding", "= F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0)", "torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx", "((sketch_predictions == data_utils.STR_ID).long()) if 
eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy:", "decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output,", "dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate,", "pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if", "self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking", "df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0)", "= torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input", "str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss +", "torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding", "* 2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size)", "def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate):", "input_code_nl_indices[:, 0]] 
cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding)", "((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions", "bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size", "encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context']", "cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding", "cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 -", "decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step]", "var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2,", "batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx]", "* 2 + self.embedding_size, 
self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear =", "from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__()", "= torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for", "target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss + str_loss) / 3.0 predictions =", "= str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss", "self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder =", "* 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding,", "else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits", "batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0", ":]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx,", "str_pred_logits = str_pred_logits.permute(1, 2, 0) 
str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0)", "= args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab =", "== data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state =", "= nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices =", "batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx,", "data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator,", "batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices", "= self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if", "- (1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long())", "batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1)", 
"batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True,", "batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input ==", "torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits -", "nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0)", "encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask =", "decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in range(batch_size):", "nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder", "decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions", "[] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = 
batch_output_code_nl_indices[batch_idx, :, :]", "= nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate,", "= batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask =", "self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len", "= nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True)", "self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear =", "self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data']", "+ self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear", "self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size)", "hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: 
self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size,", "= batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask =", "decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input", "* data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size,", "batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag:", "decode_length = gt_decode_length else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) *", "self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True,", "= self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices =", "== data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input", "df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits =", "= self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len 
=", "- self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size", "predictions = [] for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder(", "= torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions", "[] df_pred_logits = [] df_predictions = [] var_pred_logits = [] var_predictions = []", "self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output =", "self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1]", "= torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag:", "cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions", "*= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip", "= torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding],", "else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input", "self.lr *= 
lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if", "= encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding", "output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding)", "0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data", "= torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits", "df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0)", "= self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output +", "cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1)", "decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input", "input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = 
self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy:", "* 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits -", "(batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state", "3.0 predictions = torch.stack(predictions, dim=0) predictions = predictions.permute(1, 0) return total_loss, code_pred_logits, predictions", "= batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if", "output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if", "self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy", "batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]]", "self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions)", "batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = []", "hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, 
batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True,", "= self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding =", "torch import torch.nn as nn from torch.autograd import Variable from torch import cuda", "= torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9", "self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step <", "var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss =", "= [] code_predictions = [] df_pred_logits = [] df_predictions = [] var_pred_logits =", "= [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0)", "if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding", "torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished =", "self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, 
hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size,", "batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = []", "self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output =", "in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for", "= data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0)", "for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size:", "cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 -", "= var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits", "df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1)", "from .data_utils import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args,", "numpy as np from 
.data_utils import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module):", "= nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size)", "else: decode_length = self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag:", "torch.nn.utils import clip_grad_norm import torch.nn.functional as F import numpy as np from .data_utils", "batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input)", "decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1)", "param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for", "cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) *", "cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:,", "decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding", "= torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step >= gt_decode_length - 1:", "nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) 
self.decoder_code_attention_linear", "code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID)", "output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else:", ":]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding", "batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1]", "str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss + str_loss) /", "= decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if", "nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if", "cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions", "= df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits", "torch.nn.functional as F import numpy as np from .data_utils import data_utils from .modules", "cur_output_code_ctx_embedding = 
code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding =", "nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size", "* 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss =", "nn from torch.autograd import Variable from torch import cuda import torch.optim as optim", "nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam':", "cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if", "total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions", "df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0)", "class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size", "cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:, step] if self.hierarchy:", "= args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len =", "batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = 
batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1]", "= gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx,", "args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy", "args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size", "= optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise", "0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding", "= self.max_decode_len decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch =", "range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx", "decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if", "= F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0)", "= nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + 
self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size,", "str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions = predictions.permute(1, 0) return total_loss,", "dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding", "= torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy:", "nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1", "2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer =", "= args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking =", "self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size *", "2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss()", "2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size", "= batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask =", 
"F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions", "0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output,", "decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding =", "decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding,", "= self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask =", "1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0", "else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size *", "= nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training", "hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2)", "self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, 
self.code_vocab_size +", "batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx,", "self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear", "0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions,", "if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear", "code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2),", "1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input", "for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip > 0:", "else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding =", "args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip", "decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = []", "self.hierarchy: self.decoder = 
nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder", "data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1]", "= torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits", "if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step", "= batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float())", "= batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length =", "attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits)", "> 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0]", "= args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len =", "2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2,", "= cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag:", "cur_df_pred_logits = 
cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] *", "-param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr']", "= self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding =", "(batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input", "self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits", "nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size,", "if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions =", "as F import numpy as np from .data_utils import data_utils from .modules import", "self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size,", "4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear", "+ str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions = 
predictions.permute(1, 0) return", "self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments", "self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) *", "output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :])", "dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished =", "total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits =", "dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy:", "bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True)", "self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding", "train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False):", "torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask 
= torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask", "range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 =", "= self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits -", "len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] *", "ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions,", "self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state =", "cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits", "decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID", "= code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits,", "self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor", "= F.cross_entropy(str_pred_logits, target_str_output, 
ignore_index=-1) total_loss += (df_loss + var_loss + str_loss) / 3.0", "= pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding]", "= cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions", "torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask", ":]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float',", "str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions", "code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0)", "= torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda()", "= self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len)", "import torch.optim as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as F import", "code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = 
(batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask", "1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0", "self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif", "torch.nn as nn from torch.autograd import Variable from torch import cuda import torch.optim", "nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = nn.Linear(self.LSTM_hidden_size", "torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy:", "= nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer ==", "decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices", "= batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 =", "data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output,", "'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding)", "self.loss = nn.CrossEntropyLoss() if 
args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer", "dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention =", "attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights =", "= args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl =", "data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state", "if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output)", "lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for param in", "torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = []", "attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector", "cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input,", "+ self.embedding_size, self.embedding_size) 
self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size *", "nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size *", "decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if", "cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] *", "self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits", "data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state", "input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output =", "2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size)", "batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = []", "= batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = 
batch_input['code_output_mask'] output_df_mask =", "decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input)", "Variable from torch import cuda import torch.optim as optim from torch.nn.utils import clip_grad_norm" ]
[ "import settings from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from", "from django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField class", "models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url =", "return json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log') verbose_name_plural = _('activity_logs') db_table", "from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as", "= models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status", "NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property", "= models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta:", "TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta) return {} class Meta: verbose_name", "= models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at", "latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta)", "django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.utils import timezone", "django.utils.translation import ugettext_lazy as _ from django.utils import timezone from ribo_api.models.usertypes 
import TinyIntegerField", "= models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url", "default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at =", "django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _", "= models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta", "models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return", "max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now)", "default='') meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type", "if self.meta: return json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log') verbose_name_plural =", "TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL)", "models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'),", "settings from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from ribo_api.models.usertypes", "@property def meta_json(self): if self.meta: return json.loads(self.meta) return {} class Meta: verbose_name =", "created_at = models.DateTimeField(default=timezone.now) latest_at = 
models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if", "import TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user =", "status = models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta =", "class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action =", "def meta_json(self): if self.meta: return json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log')", "meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type =", "= models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta) return", "UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'),", "import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField()", "as _ from django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import", "NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action", "action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'),", "default='{}') created_at = models.DateTimeField(default=timezone.now) 
latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self):", "user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request", "json from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy", "status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}')", "models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta) return {}", "self.meta: return json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log') verbose_name_plural = _('activity_logs')", "ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200)", "models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'),", "ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user", "json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log') verbose_name_plural = _('activity_logs') db_table =", "from .usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip", "from ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True)", "timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import 
NormalTextField class UserActivityLog(models.Model): id =", "models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.utils", "max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta", "models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at =", "= models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status", "id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6)", "import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model): id", "django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model):", "import ugettext_lazy as _ from django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from", "url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now)", "import json from django.db import models from django.conf import settings from django.utils.translation import", "= models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000,", "models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status =", 
"models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='')", "_ from django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField", "import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from", "= NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0)", "from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from ribo_api.models.usertypes import", "ugettext_lazy as _ from django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes", "code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at", "data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def", "= TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta) return {} class Meta:", "meta_json(self): if self.meta: return json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log') verbose_name_plural", "from django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.utils import", ".usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip =", "device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta) return {} class", "return {} class Meta: verbose_name = _('activity_log') verbose_name_plural = 
_('activity_logs') db_table = 'ribo_user_activity_logs'" ]
[ "\"users\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True)", "import Flask from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS']", "Annotation(db.Model): __tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True)", "from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False", "email): self.username = username self.email = email class Book(db.Model): __tablename__ = \"books\" id", "Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class User(db.Model): __tablename__", "def __init__(self, username, email): self.username = username self.email = email class Book(db.Model): __tablename__", "= \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_", "user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username, email): self.username = username", "= db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world(): return 'Hello World!' if", "primary_true=True) text range_ @app.route('/') def hello_world(): return 'Hello World!' 
if __name__ == '__main__':", "db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world(): return", "class Annotation(db.Model): __tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id),", "class Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters", "Flask from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] =", "= db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library =", "db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535))", "= Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class User(db.Model):", "False db = SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True)", "db = SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True) username", "unique=True) email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self,", "chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\" id =", "db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list = 
db.Column(db.String(65535)) def", "text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ =", "\"annotations\" id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/')", "follow_list = db.Column(db.String(65535)) def __init__(self, username, email): self.username = username self.email = email", "= db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\"", "email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username,", "__tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text", "= db.Column(db.String(65535)) def __init__(self, username, email): self.username = username self.email = email class", "= \"books\" id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access", "SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app)", "= \"users\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(80),", "username self.email = email class Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True)", "primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list", "= db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = 
db.Column(db.String(65535)) class", "unique=True) user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username, email): self.username =", "\"books\" id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access =", "db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\" id", "Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters =", "= username self.email = email class Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer,", "<reponame>jimzers/annotate-mock from flask import Flask from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI']", "id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def", "User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email", "__tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer)", "= db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535))", "username, email): self.username = username self.email = email class Book(db.Model): __tablename__ = \"books\"", "'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id", "= db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\" id = db.Column(db.Integer,", "username = 
db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list =", "= db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username, email):", "db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world(): return 'Hello World!' if __name__ ==", "id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(80), unique=True) user_library", "= email class Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True) text =", "app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id =", "db.Column(db.String(65535)) def __init__(self, username, email): self.username = username self.email = email class Book(db.Model):", "from flask import Flask from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] =", "primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world(): return 'Hello", "SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80),", "import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db =", "db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username, email): self.username = username self.email =", "= SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True) username =", "__init__(self, username, email): self.username = username self.email = email class Book(db.Model): __tablename__ =", 
"__tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email =", "db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model):", "= 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\"", "db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world(): return 'Hello World!' if __name__", "= db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id =", "app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class User(db.Model): __tablename__ =", "text range_ @app.route('/') def hello_world(): return 'Hello World!' 
if __name__ == '__main__': app.run()", "app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) class", "primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__", "db.Column(db.Integer) has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True)", "book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world(): return 'Hello World!'", "email class Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535))", "flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db", "= db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_true=True) text range_ @app.route('/') def hello_world():", "= False db = SQLAlchemy(app) class User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer,", "self.username = username self.email = email class Book(db.Model): __tablename__ = \"books\" id =", "db.Column(db.String(80), unique=True) user_library = db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username, email): self.username", "class User(db.Model): __tablename__ = \"users\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True)", "= db.Column(db.String(65535)) follow_list = db.Column(db.String(65535)) def __init__(self, username, email): self.username = username self.email", "db.Column(db.String(65535)) class Annotation(db.Model): 
__tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id = db.Column(db.Integer,", "id = db.Column(db.Integer, primary_key=True) text = db.Column(db.TEXT(65535)) chapters = db.Column(db.Integer) has_access = db.Column(db.String(65535))", "has_access = db.Column(db.String(65535)) class Annotation(db.Model): __tablename__ = \"annotations\" id = db.Column(db.Integer, primary_key=True) book_id", "self.email = email class Book(db.Model): __tablename__ = \"books\" id = db.Column(db.Integer, primary_key=True) text", "flask import Flask from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db'" ]
[ "< min_x: min_x = x if y < min_y: min_y = y point_string", "re from collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1", "not None: y = int(matcher.group(1)) x = int(matcher.group(2)) if x > max_x: max_x", "min_x == -1 or min_y == -1: min_x = x min_y = y", "import re from collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x =", "or min_y == -1: min_x = x min_y = y else: if x", "point max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max point is {max_point} with distance", "y if min_x == -1 or min_y == -1: min_x = x min_y", "if y < min_y: min_y = y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx,", "is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x -", "input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y = -1 max_x =", "maxy): distances = defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point in range(miny,", "maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not in points: min_point_value = 100 min_point", "matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x", "os import re from collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x", "dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area = 0 max_point = None orig_distances", "ValueError(f\"Formatting was wrong for {point}\") for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point)", "1 return distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area = 0", "= int(matcher.group(2)) if x > max_x: max_x = x if y > max_y:", "-1 max_y = -1 points = [] for line in input_file: matcher =", "miny, maxy): distances = 
defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point in", "y > max_y: max_y = y if min_x == -1 or min_y ==", "point_string not in points: min_point_value = 100 min_point = None for point in", "= int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point -y) if point !=", "= abs(x_point - x) + abs(y_point -y) if point != min_point and current_point_value", "dict() for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for", "x > max_x: max_x = x if y > max_y: max_y = y", "max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances", "== min_point_value: min_point = None else: raise ValueError(f\"Formatting was wrong for {point}\") if", "else: raise ValueError(f\"Formatting was wrong for {point}\") for point in points: matcher =", "0 max_point = None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x", "max_y: max_y = y if min_x == -1 or min_y == -1: min_x", "current_point_value min_point = point else: raise ValueError(f\"Formatting was wrong for {point}\") for point", "point in distances.keys(): if distances[point] > max_point_area: max_point = point max_point_area = distances[point]", "x) + abs(y_point -y) if point != min_point and current_point_value == min_point_value: min_point", "> max_y: max_y = y if min_x == -1 or min_y == -1:", "calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances = dict() for distance_key in", "line) if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) if", "y_point) if current_point_value < min_point_value: min_point_value = current_point_value min_point = point else: raise", "= dict() for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key]", "distances.keys(): 
if distances[point] > max_point_area: max_point = point max_point_area = distances[point] print(f\"{point} =", "if point_string not in points: min_point_value = 100 min_point = None for point", "- y_point) if current_point_value < min_point_value: min_point_value = current_point_value min_point = point else:", "max_point = point max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max point is {max_point}", "not None: distances[min_point] += 1 return distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x},", "open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y = -1 max_x = -1 max_y", "distances = defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point in range(miny, maxy+1):", "-1 points = [] for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if", "x_point in range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if", "is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point -", "= re.match(\"(\\d+),(\\d+)\", point) if matcher is not None: y = int(matcher.group(1)) x =", "current_point_value = abs(x_point - x) + abs(y_point -y) if point != min_point and", "- x_point) + abs(y - y_point) if current_point_value < min_point_value: min_point_value = current_point_value", "if point != min_point and current_point_value == min_point_value: min_point = None else: raise", "points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point in range(minx,", "None: distances[min_point] += 1 return distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\")", "int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y - y_point) if current_point_value <", "'day6_input.txt'), 'r') min_x = -1 min_y = -1 max_x = -1 max_y =", "+1, min_y -1, max_y+1) distances = dict() for distance_key in orig_distances: if 
orig_distances[distance_key]", "import os import re from collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r')", "matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point", "range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not", "x_point) + abs(y - y_point) if current_point_value < min_point_value: min_point_value = current_point_value min_point", "was wrong for {point}\") if min_point is not None: distances[min_point] += 1 return", "= int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y - y_point) if current_point_value", "{min_y} to {max_x}, {max_y}\") max_point_area = 0 max_point = None orig_distances = calculate_distances(min_x,", "current_point_value = abs(x - x_point) + abs(y - y_point) if current_point_value < min_point_value:", "import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y = -1", "matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not None: y = int(matcher.group(1)) x", "min_y == -1: min_x = x min_y = y else: if x <", "distances[min_point] += 1 return distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area", "None else: raise ValueError(f\"Formatting was wrong for {point}\") if min_point is not None:", "min_point and current_point_value == min_point_value: min_point = None else: raise ValueError(f\"Formatting was wrong", "{min_x}, {min_y} to {max_x}, {max_y}\") max_point_area = 0 max_point = None orig_distances =", "distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in", "f\"{y_point},{x_point}\" if point_string not in points: min_point_value = 100 min_point = None for", "None: y = int(matcher.group(1)) x = int(matcher.group(2)) 
current_point_value = abs(x_point - x) +", "print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area = 0 max_point = None", "None for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not", "+ abs(y_point -y) if point != min_point and current_point_value == min_point_value: min_point =", "for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None:", "orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if distances[point] >", "maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point", "= open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y = -1 max_x = -1", "points: min_point_value = 100 min_point = None for point in points: matcher =", "y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point", "re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2))", "= f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point", "in distances.keys(): if distances[point] > max_point_area: max_point = point max_point_area = distances[point] print(f\"{point}", "min_y = y else: if x < min_x: min_x = x if y", "= y if min_x == -1 or min_y == -1: min_x = x", "min_point is not None: distances[min_point] += 1 return distances print(f\"Grid dimensions: {min_x}, {min_y}", "distances = dict() for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] =", "min_y = y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances", "abs(x_point - x) + abs(y_point -y) if point != min_point and current_point_value ==", "- x) + abs(y_point -y) 
if point != min_point and current_point_value == min_point_value:", "for {point}\") for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is", "= y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances =", "max_y+1) distances = dict() for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key]", "= None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x", "to {max_x}, {max_y}\") max_point_area = 0 max_point = None orig_distances = calculate_distances(min_x, max_x,", "was wrong for {point}\") for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if", "wrong for {point}\") if min_point is not None: distances[min_point] += 1 return distances", "distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area = 0 max_point =", "is not None: distances[min_point] += 1 return distances print(f\"Grid dimensions: {min_x}, {min_y} to", "not in points: min_point_value = 100 min_point = None for point in points:", "= f\"{y_point},{x_point}\" if point_string not in points: min_point_value = 100 min_point = None", "= point else: raise ValueError(f\"Formatting was wrong for {point}\") for point in points:", "= x if y > max_y: max_y = y if min_x == -1", "line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None: y", "if min_x == -1 or min_y == -1: min_x = x min_y =", "abs(x - x_point) + abs(y - y_point) if current_point_value < min_point_value: min_point_value =", "matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) if x >", "for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not None:", "for {point}\") if min_point is not None: distances[min_point] += 1 return distances print(f\"Grid", "if orig_distances[distance_key] == 
bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if distances[point]", "raise ValueError(f\"Formatting was wrong for {point}\") if min_point is not None: distances[min_point] +=", "max_x = x if y > max_y: max_y = y if min_x ==", "-1 max_x = -1 max_y = -1 points = [] for line in", "min_y: min_y = y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy):", "in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not in points: min_point_value =", "= re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None: y = int(matcher.group(1)) x =", "min_x = x min_y = y else: if x < min_x: min_x =", "= defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string", "+= 1 return distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area =", "min_point = None else: raise ValueError(f\"Formatting was wrong for {point}\") if min_point is", "= int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y -", "if min_point is not None: distances[min_point] += 1 return distances print(f\"Grid dimensions: {min_x},", "= point max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max point is {max_point} with", "= -1 max_y = -1 points = [] for line in input_file: matcher", "int(matcher.group(1)) x = int(matcher.group(2)) if x > max_x: max_x = x if y", "in points: min_point_value = 100 min_point = None for point in points: matcher", "== -1: min_x = x min_y = y else: if x < min_x:", "int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y - y_point)", "for x_point in range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\"", "min_x = x if y < min_y: min_y = y point_string = f\"{y},{x}\"", "is not None: y = int(matcher.group(1)) x = 
int(matcher.group(2)) if x > max_x:", "orig_distances[distance_key] for point in distances.keys(): if distances[point] > max_point_area: max_point = point max_point_area", "x < min_x: min_x = x if y < min_y: min_y = y", "def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point in range(minx, maxx+1):", "max_y = y if min_x == -1 or min_y == -1: min_x =", "= int(matcher.group(1)) x = int(matcher.group(2)) if x > max_x: max_x = x if", "point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not None: y", "{max_x}, {max_y}\") max_point_area = 0 max_point = None orig_distances = calculate_distances(min_x, max_x, min_y,", "= x min_y = y else: if x < min_x: min_x = x", "not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x - x_point)", "= int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point -y)", "max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances = dict()", "min_point = None for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher", "x = int(matcher.group(2)) if x > max_x: max_x = x if y >", "for y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not in points:", "calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point in range(minx, maxx+1): for", "int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point -y) if", "from collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y", "distances[point] > max_point_area: max_point = point max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max", "in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not None: y =", "= calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances = dict() 
for distance_key", "< min_point_value: min_point_value = current_point_value min_point = point else: raise ValueError(f\"Formatting was wrong", "{point}\") for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not", "maxx+1): for y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not in", "y = int(matcher.group(1)) x = int(matcher.group(2)) if x > max_x: max_x = x", "else: if x < min_x: min_x = x if y < min_y: min_y", "orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y", "max_x: max_x = x if y > max_y: max_y = y if min_x", "int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point -y) if point != min_point", "in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None: y =", "raise ValueError(f\"Formatting was wrong for {point}\") for point in points: matcher = re.match(\"(\\d+),(\\d+)\",", "None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1,", "for point in distances.keys(): if distances[point] > max_point_area: max_point = point max_point_area =", "f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point in", "re.match(\"(\\d+),(\\d+)\", point) if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2))", "return distances print(f\"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}\") max_point_area = 0 max_point", "{point}\") if min_point is not None: distances[min_point] += 1 return distances print(f\"Grid dimensions:", "collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y =", "y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not in points: min_point_value", "if y > max_y: max_y = y if min_x == 
-1 or min_y", "100 min_point = None for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if", "max_point_area = 0 max_point = None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances", "-1: min_x = x min_y = y else: if x < min_x: min_x", "in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys():", "for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point", "== bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if distances[point] > max_point_area:", "max_point_area: max_point = point max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max point is", "ValueError(f\"Formatting was wrong for {point}\") if min_point is not None: distances[min_point] += 1", "= -1 max_x = -1 max_y = -1 points = [] for line", "x if y < min_y: min_y = y point_string = f\"{y},{x}\" points.append(point_string) def", "= current_point_value min_point = point else: raise ValueError(f\"Formatting was wrong for {point}\") for", "!= min_point and current_point_value == min_point_value: min_point = None else: raise ValueError(f\"Formatting was", "wrong for {point}\") for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher", "x = int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y - y_point) if", "-1 or min_y == -1: min_x = x min_y = y else: if", "min_point_value = current_point_value min_point = point else: raise ValueError(f\"Formatting was wrong for {point}\")", "input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None: y = int(matcher.group(1))", "range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string not in points: min_point_value = 100", "point) if 
matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value", "= calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1,", "min_point_value = 100 min_point = None for point in points: matcher = re.match(\"(\\d+),(\\d+)\",", "points = [] for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher", "None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x - x_point) +", "min_x: min_x = x if y < min_y: min_y = y point_string =", "-y) if point != min_point and current_point_value == min_point_value: min_point = None else:", "point else: raise ValueError(f\"Formatting was wrong for {point}\") for point in points: matcher", "point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for", "if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value =", "= abs(x - x_point) + abs(y - y_point) if current_point_value < min_point_value: min_point_value", "{max_y}\") max_point_area = 0 max_point = None orig_distances = calculate_distances(min_x, max_x, min_y, max_y)", "= y else: if x < min_x: min_x = x if y <", "y else: if x < min_x: min_x = x if y < min_y:", "+ abs(y - y_point) if current_point_value < min_point_value: min_point_value = current_point_value min_point =", "> max_point_area: max_point = point max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max point", "min_y = -1 max_x = -1 max_y = -1 points = [] for", "calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1)", "= None for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is", "min_point_value: min_point = None else: raise ValueError(f\"Formatting was wrong for {point}\") if min_point", "if current_point_value < min_point_value: 
min_point_value = current_point_value min_point = point else: raise ValueError(f\"Formatting", "matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not None: y = int(matcher.group(1)) x", "= -1 min_y = -1 max_x = -1 max_y = -1 points =", "min_point = point else: raise ValueError(f\"Formatting was wrong for {point}\") for point in", "max_point = None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1,", "= [] for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is", "in range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string = f\"{y_point},{x_point}\" if point_string", "abs(y - y_point) if current_point_value < min_point_value: min_point_value = current_point_value min_point = point", "defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string =", "if x > max_x: max_x = x if y > max_y: max_y =", "'r') min_x = -1 min_y = -1 max_x = -1 max_y = -1", "< min_y: min_y = y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny,", "max_x +1, min_y -1, max_y+1) distances = dict() for distance_key in orig_distances: if", "point_string = f\"{y_point},{x_point}\" if point_string not in points: min_point_value = 100 min_point =", "int(matcher.group(2)) if x > max_x: max_x = x if y > max_y: max_y", "-1, max_x +1, min_y -1, max_y+1) distances = dict() for distance_key in orig_distances:", "orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if", "if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) if x", "x min_y = y else: if x < min_x: min_x = x if", "if x < min_x: min_x = x if y < min_y: min_y =", "= None else: raise ValueError(f\"Formatting was wrong for {point}\") if min_point is not", "> max_x: max_x = x if y > max_y: max_y = y if", "== 
-1 or min_y == -1: min_x = x min_y = y else:", "min_x = -1 min_y = -1 max_x = -1 max_y = -1 points", "min_point_value: min_point_value = current_point_value min_point = point else: raise ValueError(f\"Formatting was wrong for", "and current_point_value == min_point_value: min_point = None else: raise ValueError(f\"Formatting was wrong for", "distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if distances[point] > max_point_area: max_point =", "y < min_y: min_y = y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx,", "point != min_point and current_point_value == min_point_value: min_point = None else: raise ValueError(f\"Formatting", "min_y -1, max_y+1) distances = dict() for distance_key in orig_distances: if orig_distances[distance_key] ==", "y point_string = f\"{y},{x}\" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1)", "= x if y < min_y: min_y = y point_string = f\"{y},{x}\" points.append(point_string)", "points: matcher = re.match(\"(\\d+),(\\d+)\", point) if matcher is not None: y = int(matcher.group(1))", "min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances =", "max_x = -1 max_y = -1 points = [] for line in input_file:", "max_y = -1 points = [] for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\",", "defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y = -1 max_x", "x if y > max_y: max_y = y if min_x == -1 or", "else: raise ValueError(f\"Formatting was wrong for {point}\") if min_point is not None: distances[min_point]", "= orig_distances[distance_key] for point in distances.keys(): if distances[point] > max_point_area: max_point = point", "x = int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point -y) if point", "abs(y_point -y) if point != min_point and current_point_value == 
min_point_value: min_point = None", "bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances = dict() for", "-1, max_y+1) distances = dict() for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]:", "y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y", "-1 min_y = -1 max_x = -1 max_y = -1 points = []", "= -1 points = [] for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line)", "bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if distances[point] > max_point_area: max_point", "max_point_area = distances[point] print(f\"{point} = {distances[point]}\") print(f\"Max point is {max_point} with distance {max_point_area}\")", "current_point_value < min_point_value: min_point_value = current_point_value min_point = point else: raise ValueError(f\"Formatting was", "not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point - x)", "= 100 min_point = None for point in points: matcher = re.match(\"(\\d+),(\\d+)\", point)", "current_point_value == min_point_value: min_point = None else: raise ValueError(f\"Formatting was wrong for {point}\")", "= 0 max_point = None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances =", "if distances[point] > max_point_area: max_point = point max_point_area = distances[point] print(f\"{point} = {distances[point]}\")", "[] for line in input_file: matcher = re.match(\"(\\d+),\\s(\\d+)\", line) if matcher is not", "None: y = int(matcher.group(1)) x = int(matcher.group(2)) if x > max_x: max_x =" ]
[ "= input(num) if teste.isnumeric(): teste = int(teste) return teste break else: print('\\033[0;31mERRO! digite", "else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite", "def leiaInt(num): while True: teste = input(num) if teste.isnumeric(): teste = int(teste) return", "PRINCIPAL n = leiaInt('Digite umm número: ') print(f'Você acabou de digitar o número", "teste break else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n", "válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite umm número: ') print(f'Você acabou de", "teste.isnumeric(): teste = int(teste) return teste break else: print('\\033[0;31mERRO! digite um número inteiro", "int(teste) return teste break else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') # PROGRAMA", "while True: teste = input(num) if teste.isnumeric(): teste = int(teste) return teste break", "n = leiaInt('Digite umm número: ') print(f'Você acabou de digitar o número {n}')", "inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite umm número: ') print(f'Você acabou", "if teste.isnumeric(): teste = int(teste) return teste break else: print('\\033[0;31mERRO! digite um número", "leiaInt(num): while True: teste = input(num) if teste.isnumeric(): teste = int(teste) return teste", "return teste break else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL", "# PROGRAMA PRINCIPAL n = leiaInt('Digite umm número: ') print(f'Você acabou de digitar", "um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite umm número: ')", "True: teste = input(num) if teste.isnumeric(): teste = int(teste) return teste break else:", "digite um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite umm número:", "input(num) if teste.isnumeric(): teste = int(teste) return teste break else: print('\\033[0;31mERRO! 
digite um", "= int(teste) return teste break else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') #", "número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite umm número: ') print(f'Você", "teste = input(num) if teste.isnumeric(): teste = int(teste) return teste break else: print('\\033[0;31mERRO!", "teste = int(teste) return teste break else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m')", "PROGRAMA PRINCIPAL n = leiaInt('Digite umm número: ') print(f'Você acabou de digitar o", "print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n = leiaInt('Digite umm", "break else: print('\\033[0;31mERRO! digite um número inteiro válido.\\033[m') # PROGRAMA PRINCIPAL n =" ]
[ "= patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\": form = PatientForm(instance=selected_patient) context", "context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\": form = PatientForm(request.POST)", "def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm() context", "redirect(\"/\") # Insert def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if form.is_valid(): form.save()", "django.shortcuts import render,redirect,get_object_or_404 #import requests # Create your views here. # Retrieve def", "= form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date = form.data[\" booking_date\"]", "return redirect(\"/\") # Insert def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if form.is_valid():", "import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests # Create your views here.", "Insert def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if form.is_valid(): form.save() return render", "django.http import HttpResponse,HttpResponseRedirect from . models import HelthDepartment , Patient from . forms", "requests # Create your views here. 
# Retrieve def index(request): #categories = Category.objects.all()", "= PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\": form", "= Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\": form", "return render(request,\"edit.html\",context) if request.method == \"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name", "from . models import HelthDepartment , Patient from . forms import PatientForm from", "booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\")", "= {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete()", "Patient from . forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests #", "== \"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method", "delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients =", "def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients", "from django.shortcuts import render,redirect,get_object_or_404 #import requests # Create your views here. 
# Retrieve", "#categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm() context = {\"form\":form}", "= get_objects_or_404(Patient,id=id) if request.method == \"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient}", "\"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method ==", "#import requests # Create your views here. # Retrieve def index(request): #categories =", "form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department", "helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context)", "if request.method == \"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"]", "request.method == \"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact", "PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests # Create your views here. 
#", "= Patient.objects.all() form = PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete", "patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\":", "= form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def add_(request): if", "PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"]", "= PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email =", "# update def edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id)", "form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\":", "import HelthDepartment , Patient from . 
forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404", "PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\": form =", "form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email", "= form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert", "render,redirect,get_object_or_404 #import requests # Create your views here. # Retrieve def index(request): #categories", "form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST)", "form = PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id):", "def edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method", "= {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\": form = PatientForm(request.POST) if", "= form.data[\"email\"] selected_patient. 
booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"]", "selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form", "# Insert def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if form.is_valid(): form.save() return", "import HttpResponse,HttpResponseRedirect from . models import HelthDepartment , Patient from . forms import", "= Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients = Patient.objects.all() #selected_patient", "selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") #", "index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm() context =", "Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\": form =", "render(request,\"edit.html\",context) if request.method == \"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name =", "booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"]", "forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests # Create your views", "# Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return 
HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def", ". models import HelthDepartment , Patient from . forms import PatientForm from django.shortcuts", "import render,redirect,get_object_or_404 #import requests # Create your views here. # Retrieve def index(request):", "== \"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact =", "form.data[\"email\"] selected_patient. booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history", "\"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"]", "PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient =", "HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient =", "models import HelthDepartment , Patient from . forms import PatientForm from django.shortcuts import", "if request.method == \"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context)", "context = {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient = Patient.objects.get(id=id)", "= form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"]", "selected_patient.email = form.data[\"email\"] selected_patient. 
booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department =", "= form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save()", "Create your views here. # Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients", "form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date = form.data[\" booking_date\"] selected_patient.appointment_date", "HelthDepartment , Patient from . forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import", "patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\": form = PatientForm(instance=selected_patient) context =", "#print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date =", "selected_patient. booking_date = form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history =", "selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date = form.data[\"", "# Create your views here. 
# Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all()", "patients = Patient.objects.all() form = PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context) #", "get_objects_or_404(Patient,id=id) if request.method == \"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return", "selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def add_(request):", ",\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\": form = PatientForm(request.POST) if form.is_valid: #print(form.__dict__)", "form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def", "form.data[\" booking_date\"] selected_patient.appointment_date = form.data[\"appointment_date\"] selected_patient.helth_department = form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return", "patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients = Patient.objects.all()", "<filename>booking_app/views.py from django.http import HttpResponse,HttpResponseRedirect from . 
models import HelthDepartment , Patient from", "= form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def add_(request): if request.method==\"POST\": form =", "update def edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if", "render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) #", "return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER'))", "Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients = Patient.objects.all() #selected_patient =", "Patient.objects.all() form = PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def", "# Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form =", "HttpResponse,HttpResponseRedirect from . models import HelthDepartment , Patient from . forms import PatientForm", ". forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests # Create your", "def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if form.is_valid(): form.save() return render (request,", "form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. booking_date", ", Patient from . 
forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests", "Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id):", "your views here. # Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients =", "Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm()", "from . forms import PatientForm from django.shortcuts import render,redirect,get_object_or_404 #import requests # Create", "\"booking_app/index.html\",context) # Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update", "selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def add_(request): if request.method==\"POST\": form", "request.method == \"GET\": form = PatientForm(instance=selected_patient) context = {\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if", "selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient. 
booking_date = form.data[\" booking_date\"] selected_patient.appointment_date =", "form.data[\"helth_department\"] selected_patient.history = form.data[\"historyl\"] selected_patient.save() return redirect(\"/\") # Insert def add_(request): if request.method==\"POST\":", "Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm() context = {\"form\":form} return render(request,", "= PatientForm() context = {\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient", "{\"patients\":patients,\"form\":form ,\"selected_patient\":selected_patient} return render(request,\"edit.html\",context) if request.method == \"POST\": form = PatientForm(request.POST) if form.is_valid:", "#selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method == \"GET\": form = PatientForm(instance=selected_patient)", "edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient = get_objects_or_404(Patient,id=id) if request.method ==", "views here. # Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all()", "patient.delete() return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id)", "= Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form = PatientForm() context = {\"form\":form} return", "from django.http import HttpResponse,HttpResponseRedirect from . 
models import HelthDepartment , Patient from .", "selected_patient.save() return redirect(\"/\") # Insert def add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if", "add_(request): if request.method==\"POST\": form = PatientForm(data=request.POST) if form.is_valid(): form.save() return render (request, \"website/index.html\")", "here. # Retrieve def index(request): #categories = Category.objects.all() helthDepartments=HelthDepartment.objects.all() patients = Patient.objects.all() form", "{\"form\":form} return render(request, \"booking_app/index.html\",context) # Delete def delete(request,id): patient = Patient.objects.get(id=id) patient.delete() return", "if form.is_valid: #print(form.__dict__) selected_patient.name = form.data[\"name\"] selected_patient.contact = form.data[\"contact\"] selected_patient.email = form.data[\"email\"] selected_patient.", "return HttpResponseRedirect(request.META.get('HTTP_REFERER')) # update def edit(request,id): patients = Patient.objects.all() #selected_patient = patients.objects.get(id=id) selected_patient" ]
[ "ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name': request.user.last_name}) return render(request, 'phrasebook/profile.html',", "user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages':", "from django.contrib.auth.decorators import login_required from django.http import JsonResponse from django.shortcuts import render, redirect", "if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html',", "context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request):", "from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html')", "render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import", "get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def index(request): return render(request,", "profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name': request.user.last_name}) return render(request,", "user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name'))", "0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) 
@login_required()", "django.http import JsonResponse from django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware from", "from django.http import JsonResponse from django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware", "languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request,", "render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required()", "languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx", "render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages", "redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request):", "import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import", "def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(),", "context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 
5,", "import render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware", "import * def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words =", "* def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10]", "render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request,", "lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = get_sidebar_args(request, {})", "@login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx =", "from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def", "<filename>phrasebook/views/general.py<gh_stars>1-10 from django.contrib.auth.decorators import login_required from django.http import JsonResponse from django.shortcuts import render,", "context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user)", "words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) 
if user_languages.__len__()", "phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words", "def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return", "= get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name': request.user.last_name}) return render(request, 'phrasebook/profile.html', context=ctx)", "'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def", "django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from", "JsonResponse from django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import", "'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages =", "def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name': request.user.last_name}) return", "\"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app')", "index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return 
render(request,", "> 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages})", "login_required from django.http import JsonResponse from django.shortcuts import render, redirect from django.utils.decorators import", "return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html',", "@login_required() def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name': request.user.last_name})", "import JsonResponse from django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts", "else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return", "@login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else:", "words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"}))", "decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import *", "= Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": 
\"active\"})) @login_required()", "return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs':", "app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\":", "django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models", "= list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html',", "{})) @login_required() def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name':", "phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def index(request):", "django.contrib.auth.decorators import login_required from django.http import JsonResponse from django.shortcuts import render, redirect from", "words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0:", "'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\":", "render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = 
get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len']", "def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages", "redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware", "{\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if", "@login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words,", "return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {}))", "from phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request):", "import FirstLoginMiddleware from phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware)", "@decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\":", "def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = get_sidebar_args(request,", "import 
get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def index(request): return", "from django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args", "import login_required from django.http import JsonResponse from django.shortcuts import render, redirect from django.utils.decorators", "return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request):", "\"page_app\": \"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return", "first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages =", "= UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return", "'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] -", "Word.objects.filter(category__user=request.user).order_by(\"-created_on\")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {\"words\": words, \"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def", "from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from", "return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', 
context={'languages': languages}) @login_required() def", "phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html') @login_required()", "list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request,", "\"words__len\": words.__len__(), \"page_app\": \"active\"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() >", "UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request,", "FirstLoginMiddleware from phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def" ]
[ "listado, tabla, usuarios): self.registro = registro self.listado = listado self.tabla = tabla self.usuarios", "my_win, registro, listado, tabla, usuarios): self.registro = registro self.listado = listado self.tabla =", "QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla, usuarios): self.registro =", "__init__(self, my_win, registro, listado, tabla, usuarios): self.registro = registro self.listado = listado self.tabla", "QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla, usuarios): self.registro = registro", "QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self):", "ventanas.ventana_bienvenida import Ui_music_window from PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win,", "= str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def", "Ui_music_window from PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado,", "self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win) def volver_incio(self): self.setupUi(self.my_win) def registrar_usuario(self): self.usuarios.setupUi(self.my_win)", "= tabla self.usuarios = usuarios self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio)", "#incluir hora hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def", 
"setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate()", "self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date = str(date.toPyDate())", "hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win)", "listado self.tabla = tabla self.usuarios = usuarios self.my_win = my_win def setupUi(self, my_win):", "from PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla,", "date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt)", "usuarios): self.registro = registro self.listado = listado self.tabla = tabla self.usuarios = usuarios", "<filename>ventanas_clases/bienvenidos.py from ventanas.ventana_bienvenida import Ui_music_window from PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def", "import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla, usuarios): 
self.registro", "QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\")", "usuarios self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario)", "self.listado = listado self.tabla = tabla self.usuarios = usuarios self.my_win = my_win def", "= QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def", "my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date", "self.tabla = tabla self.usuarios = usuarios self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win)", "self.usuarios = usuarios self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones)", "class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla, usuarios): self.registro = registro self.listado", "registro 
self.listado = listado self.tabla = tabla self.usuarios = usuarios self.my_win = my_win", "= usuarios self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones)", "fecha date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime()", "self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir", "self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date)", "self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir", "self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora", 
"my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date", "hora hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self):", "def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win) def volver_incio(self): self.setupUi(self.my_win) def", "str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self):", "= listado self.tabla = tabla self.usuarios = usuarios self.my_win = my_win def setupUi(self,", "self.registro = registro self.listado = listado self.tabla = tabla self.usuarios = usuarios self.my_win", "self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora", "hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win) def volver_incio(self):", "date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = 
QTime.currentTime() hora_txt", "= registro self.listado = listado self.tabla = tabla self.usuarios = usuarios self.my_win =", "= my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha", "PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla, usuarios):", "from ventanas.ventana_bienvenida import Ui_music_window from PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self,", "tabla self.usuarios = usuarios self.my_win = my_win def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion)", "super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date = QDate.currentDate() date =", "= QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime() hora_txt =", "VentanaMusical(Ui_music_window): def __init__(self, my_win, registro, listado, tabla, usuarios): self.registro = registro self.listado =", "registro, listado, tabla, usuarios): self.registro = registro self.listado = listado self.tabla = tabla", "self.txt_fecha.setText(date) #incluir hora hora = QTime.currentTime() hora_txt = 
hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win)", "tabla, usuarios): self.registro = registro self.listado = listado self.tabla = tabla self.usuarios =", "self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win) def volver_incio(self): self.setupUi(self.my_win)", "def setupUi(self, my_win): super().setupUi(my_win) self.sub_menu_inicio.aboutToShow.connect(self.volver_incio) self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion) self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones) self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones) self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario) #incluir fecha date =", "#incluir fecha date = QDate.currentDate() date = str(date.toPyDate()) self.txt_fecha.setText(date) #incluir hora hora =", "= hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win) def", "import Ui_music_window from PyQt5.Qt import QTime, QDate class VentanaMusical(Ui_music_window): def __init__(self, my_win, registro,", "mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def mostrar_listado_canciones(self): self.listado.setupUi(self.my_win) def mostrar_tabla_canciones(self): self.tabla.setupUi(self.my_win) def volver_incio(self): self.setupUi(self.my_win) def registrar_usuario(self):", "hora = QTime.currentTime() hora_txt = hora.toString(\"hh:mm\") self.lcdNumber.display(hora_txt) def mostrar_registrar_cancion(self): self.registro.setupUi(self.my_win) def 
mostrar_listado_canciones(self): self.listado.setupUi(self.my_win)", "def __init__(self, my_win, registro, listado, tabla, usuarios): self.registro = registro self.listado = listado" ]
[ "instance and uses ``grab`` to render an image of the window. The window", "imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname):", "try: # pyside-specific if qApp: qApp.shutdown() except NameError: pass QApplication.exec_ = lambda _:", "and uses ``grab`` to render an image of the window. The window is", "app is None: app = QApplication([]) app.processEvents() # get top-level widgets that aren't", "\"\"\" try: # pyside-specific if qApp: qApp.shutdown() except NameError: pass QApplication.exec_ = lambda", "None: app = QApplication([]) app.processEvents() # get top-level widgets that aren't hidden widgets", "in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" try: # pyside-specific if qApp: qApp.shutdown()", "call ``show()`` again to render it in a subsequent cell. ``processEvents`` is called", "sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if app is None:", "of the window. The window is closed afterward, so you have to call", "uses ``grab`` to render an image of the window. The window is closed", "still need to propagate. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration.", "= ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a Qt window", "Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" try: # pyside-specific", "imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if app is None: app = QApplication([])", "configuration. \"\"\" try: # pyside-specific if qApp: qApp.shutdown() except NameError: pass QApplication.exec_ =", "top-level widgets that aren't hidden widgets = [w for w in app.topLevelWidgets() if", "have to call ``show()`` again to render it in a subsequent cell. 
``processEvents``", "return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and disable ``exec_``.", "reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and disable ``exec_``. Disabling ``QApplication.exec_`` means your", "qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation", "cell. ``processEvents`` is called once in case events still need to propagate. Intended", "example execution by sphinx-gallery. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration.", "an image of the window. The window is closed afterward, so you have", "use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app =", "render it in a subsequent cell. ``processEvents`` is called once in case events", "w.isHidden()] rendered_imgs = [] for widg, imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab()", "the current application instance and uses ``grab`` to render an image of the", "event loop (so the scripts work when run normally) without blocking example execution", "gallery_conf): \"\"\"Basic implementation of a Qt window scraper. Looks for any non-hidden windows", "gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and disable ``exec_``. Disabling ``QApplication.exec_``", "Disabling ``QApplication.exec_`` means your example scripts can run the Qt event loop (so", "aren't hidden widgets = [w for w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs", "normally) without blocking example execution by sphinx-gallery. 
Intended for use in ``image_scrappers`` in", "in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def", "window is closed afterward, so you have to call ``show()`` again to render", "\"\"\"Basic implementation of a Qt window scraper. Looks for any non-hidden windows in", "scripts can run the Qt event loop (so the scripts work when run", "import QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of", "is None: app = QApplication([]) app.processEvents() # get top-level widgets that aren't hidden", "def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and disable ``exec_``. Disabling ``QApplication.exec_`` means", "application instance and uses ``grab`` to render an image of the window. The", "the scripts work when run normally) without blocking example execution by sphinx-gallery. Intended", "a Qt window scraper. Looks for any non-hidden windows in the current application", "for any non-hidden windows in the current application instance and uses ``grab`` to", "'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a Qt window scraper. Looks", "any non-hidden windows in the current application instance and uses ``grab`` to render", "in the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if app", "when run normally) without blocking example execution by sphinx-gallery. Intended for use in", "# pyside-specific if qApp: qApp.shutdown() except NameError: pass QApplication.exec_ = lambda _: None", "closed afterward, so you have to call ``show()`` again to render it in", "need to propagate. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. 
\"\"\"", "widgets = [w for w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs = []", "Qt window scraper. Looks for any non-hidden windows in the current application instance", "to call ``show()`` again to render it in a subsequent cell. ``processEvents`` is", "QApplication and disable ``exec_``. Disabling ``QApplication.exec_`` means your example scripts can run the", "it in a subsequent cell. ``processEvents`` is called once in case events still", "the Qt event loop (so the scripts work when run normally) without blocking", "scrapers from qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf):", "in the sphinx-gallery configuration. \"\"\" try: # pyside-specific if qApp: qApp.shutdown() except NameError:", "for w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs = [] for widg, imgpath", "pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication", "subsequent cell. ``processEvents`` is called once in case events still need to propagate.", "disable ``exec_``. Disabling ``QApplication.exec_`` means your example scripts can run the Qt event", "from sphinx_gallery import scrapers from qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp'] def", "sphinx_gallery import scrapers from qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block,", "loop (so the scripts work when run normally) without blocking example execution by", "scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and disable ``exec_``. 
Disabling", "windows in the current application instance and uses ``grab`` to render an image", "app = QApplication.instance() if app is None: app = QApplication([]) app.processEvents() # get", "that aren't hidden widgets = [w for w in app.topLevelWidgets() if not w.isHidden()]", "image of the window. The window is closed afterward, so you have to", "= QApplication.instance() if app is None: app = QApplication([]) app.processEvents() # get top-level", "\"\"\"Shutdown an existing QApplication and disable ``exec_``. Disabling ``QApplication.exec_`` means your example scripts", "run the Qt event loop (so the scripts work when run normally) without", "block_vars['image_path_iterator'] app = QApplication.instance() if app is None: app = QApplication([]) app.processEvents() #", "fname): \"\"\"Shutdown an existing QApplication and disable ``exec_``. Disabling ``QApplication.exec_`` means your example", "__all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a Qt", "w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs = [] for widg, imgpath in", "not w.isHidden()] rendered_imgs = [] for widg, imgpath in zip(widgets, imgpath_iter): pixmap =", "your example scripts can run the Qt event loop (so the scripts work", "from qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic", "``grab`` to render an image of the window. The window is closed afterward,", "# get top-level widgets that aren't hidden widgets = [w for w in", "in app.topLevelWidgets() if not w.isHidden()] rendered_imgs = [] for widg, imgpath in zip(widgets,", "<filename>qtgallery/qtscraper.py from sphinx_gallery import scrapers from qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp']", "use in ``image_scrappers`` in the sphinx-gallery configuration. 
\"\"\" try: # pyside-specific if qApp:", "example scripts can run the Qt event loop (so the scripts work when", "so you have to call ``show()`` again to render it in a subsequent", "run normally) without blocking example execution by sphinx-gallery. Intended for use in ``image_scrappers``", "widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and disable", "qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a Qt window scraper. Looks for any", "imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir'])", "execution by sphinx-gallery. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\"", "again to render it in a subsequent cell. ``processEvents`` is called once in", "the sphinx-gallery configuration. \"\"\" try: # pyside-specific if qApp: qApp.shutdown() except NameError: pass", "current application instance and uses ``grab`` to render an image of the window.", "work when run normally) without blocking example execution by sphinx-gallery. Intended for use", "called once in case events still need to propagate. Intended for use in", "QApplication.instance() if app is None: app = QApplication([]) app.processEvents() # get top-level widgets", "= [w for w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs = [] for", "``processEvents`` is called once in case events still need to propagate. 
Intended for", "app = QApplication([]) app.processEvents() # get top-level widgets that aren't hidden widgets =", "app.processEvents() # get top-level widgets that aren't hidden widgets = [w for w", "widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing", "rendered_imgs = [] for widg, imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath)", "block_vars, gallery_conf): \"\"\"Basic implementation of a Qt window scraper. Looks for any non-hidden", "once in case events still need to propagate. Intended for use in ``image_scrappers``", "\"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if app is None: app =", "pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown", "``QApplication.exec_`` means your example scripts can run the Qt event loop (so the", "scripts work when run normally) without blocking example execution by sphinx-gallery. Intended for", "[w for w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs = [] for widg,", "import scrapers from qtpy.QtWidgets import QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars,", "Looks for any non-hidden windows in the current application instance and uses ``grab``", "in the current application instance and uses ``grab`` to render an image of", "render an image of the window. The window is closed afterward, so you", "in case events still need to propagate. Intended for use in ``image_scrappers`` in", "in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance()", "without blocking example execution by sphinx-gallery. 
Intended for use in ``image_scrappers`` in the", "the window. The window is closed afterward, so you have to call ``show()``", "['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a Qt window scraper.", "sphinx-gallery. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" try: #", "rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an existing QApplication and", "in a subsequent cell. ``processEvents`` is called once in case events still need", "to render it in a subsequent cell. ``processEvents`` is called once in case", "(so the scripts work when run normally) without blocking example execution by sphinx-gallery.", "= widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf, fname): \"\"\"Shutdown an", "is called once in case events still need to propagate. Intended for use", "implementation of a Qt window scraper. Looks for any non-hidden windows in the", "and disable ``exec_``. Disabling ``QApplication.exec_`` means your example scripts can run the Qt", "for widg, imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return", "a subsequent cell. ``processEvents`` is called once in case events still need to", "blocking example execution by sphinx-gallery. Intended for use in ``image_scrappers`` in the sphinx-gallery", "of a Qt window scraper. Looks for any non-hidden windows in the current", "hidden widgets = [w for w in app.topLevelWidgets() if not w.isHidden()] rendered_imgs =", "existing QApplication and disable ``exec_``. 
Disabling ``QApplication.exec_`` means your example scripts can run", "The window is closed afterward, so you have to call ``show()`` again to", "[] for widg, imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close()", "app.topLevelWidgets() if not w.isHidden()] rendered_imgs = [] for widg, imgpath in zip(widgets, imgpath_iter):", "for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" try: # pyside-specific if", "Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator']", "if app is None: app = QApplication([]) app.processEvents() # get top-level widgets that", "an existing QApplication and disable ``exec_``. Disabling ``QApplication.exec_`` means your example scripts can", "window scraper. Looks for any non-hidden windows in the current application instance and", "for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app", "to render an image of the window. The window is closed afterward, so", "= [] for widg, imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath)", "to propagate. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. 
\"\"\" imgpath_iter", "QApplication([]) app.processEvents() # get top-level widgets that aren't hidden widgets = [w for", "can run the Qt event loop (so the scripts work when run normally)", "get top-level widgets that aren't hidden widgets = [w for w in app.topLevelWidgets()", "QApplication __all__ = ['qtscraper', 'reset_qapp'] def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a", "= block_vars['image_path_iterator'] app = QApplication.instance() if app is None: app = QApplication([]) app.processEvents()", "afterward, so you have to call ``show()`` again to render it in a", "is closed afterward, so you have to call ``show()`` again to render it", "means your example scripts can run the Qt event loop (so the scripts", "non-hidden windows in the current application instance and uses ``grab`` to render an", "sphinx-gallery configuration. \"\"\" try: # pyside-specific if qApp: qApp.shutdown() except NameError: pass QApplication.exec_", "widgets that aren't hidden widgets = [w for w in app.topLevelWidgets() if not", "Qt event loop (so the scripts work when run normally) without blocking example", "``image_scrappers`` in the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if", "scraper. Looks for any non-hidden windows in the current application instance and uses", "propagate. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" imgpath_iter =", "by sphinx-gallery. Intended for use in ``image_scrappers`` in the sphinx-gallery configuration. \"\"\" try:", "you have to call ``show()`` again to render it in a subsequent cell.", "``exec_``. Disabling ``QApplication.exec_`` means your example scripts can run the Qt event loop", "configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if app is None: app", "``show()`` again to render it in a subsequent cell. 
``processEvents`` is called once", "zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir']) def reset_qapp(gallery_conf,", "widg, imgpath in zip(widgets, imgpath_iter): pixmap = widg.grab() pixmap.save(imgpath) rendered_imgs.append(imgpath) widg.close() return scrapers.figure_rst(rendered_imgs,", "the sphinx-gallery configuration. \"\"\" imgpath_iter = block_vars['image_path_iterator'] app = QApplication.instance() if app is", "def qtscraper(block, block_vars, gallery_conf): \"\"\"Basic implementation of a Qt window scraper. Looks for", "if not w.isHidden()] rendered_imgs = [] for widg, imgpath in zip(widgets, imgpath_iter): pixmap", "``image_scrappers`` in the sphinx-gallery configuration. \"\"\" try: # pyside-specific if qApp: qApp.shutdown() except", "events still need to propagate. Intended for use in ``image_scrappers`` in the sphinx-gallery", "= QApplication([]) app.processEvents() # get top-level widgets that aren't hidden widgets = [w", "window. The window is closed afterward, so you have to call ``show()`` again", "case events still need to propagate. Intended for use in ``image_scrappers`` in the" ]
[ "torch.autograd import Variable # from model import AutoEncoder from utils import rolling_window class", "import torch from torch import nn from torch.autograd import Variable # from model", "self.net.eval() trainx = self.x if type(x)!=int: trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return", "= self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(),", "1) % 50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) def", "plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): # n = plt.plot((self.xi-self.xe)**2,label=\"Error\") plt.title(\"Reconstruction", "Variable(self.x) out = self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if", "e in range(Nmax): var_x = Variable(self.x) out = self.net(var_x) loss = self.criterion(out, var_x)", "xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e", "# from model import AutoEncoder from utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10,", "= netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window):", "= xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x =", "return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x = Variable(self.x) out", "def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data 
reconstruction\")", "{}, Loss: {:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net = self.net.eval() trainx =", "self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain", "self.x if type(x)!=int: trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self):", "loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) %", "out = self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e", "from utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x", "if (e + 1) % 50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e +", "xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data", "self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self):", "= nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain =", "AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window)", "+ 1) % 50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item()))", "lr=0.01): self.tw = time_window self.x = self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion", "% 50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) def 
eval(self,x=0):", "class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x = self.process(x,time_window) self.net =", "net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\")", "AutoEncoder from utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window", "torch from torch import nn from torch.autograd import Variable # from model import", "torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x = Variable(self.x) out =", "torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def", "(e + 1) % 50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1,", "in range(Nmax): var_x = Variable(self.x) out = self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad()", "eval(self,x=0): net = self.net.eval() trainx = self.x if type(x)!=int: trainx = self.process(x,self.tw) xans", "plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): # n = plt.plot((self.xi-self.xe)**2,label=\"Error\")", "import Variable # from model import AutoEncoder from utils import rolling_window class Trainer():", "self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show()", "net = self.net.eval() trainx = self.x if type(x)!=int: trainx = self.process(x,self.tw) xans =", "self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() 
self.net_loss.append(loss.item()) if (e + 1)", "self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo =", "# AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain =", "self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): # n =", "for e in range(Nmax): var_x = Variable(self.x) out = self.net(var_x) loss = self.criterion(out,", "lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500):", "return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train", "def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x = Variable(self.x) out = self.net(var_x)", "50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net", "utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x =", "torch import nn from torch.autograd import Variable # from model import AutoEncoder from", "from model import AutoEncoder from utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01):", "= torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float()", "def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x = self.process(x,time_window) self.net = netr #", "type(x)!=int: trainx = self.process(x,self.tw) xans = 
net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi =", "rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax):", "xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x = Variable(self.x)", "import matplotlib.pyplot as plt import torch from torch import nn from torch.autograd import", "import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x = self.process(x,time_window)", "plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\")", "plt import torch from torch import nn from torch.autograd import Variable # from", "= self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e +", "== 0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net =", "var_x = Variable(self.x) out = self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step()", "self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) % 50 == 0: print('Epoch: {}, Loss:", "__init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x = self.process(x,time_window) self.net = netr # AutoEncoder(time_window)", "self.net = netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def", "if type(x)!=int: trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi", "as np import matplotlib.pyplot as plt import torch from torch import nn 
from", "import numpy as np import matplotlib.pyplot as plt import torch from torch import", "matplotlib.pyplot as plt import torch from torch import nn from torch.autograd import Variable", "= self.net.eval() trainx = self.x if type(x)!=int: trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy()", "train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x = Variable(self.x) out = self.net(var_x) loss", "np import matplotlib.pyplot as plt import torch from torch import nn from torch.autograd", "from torch import nn from torch.autograd import Variable # from model import AutoEncoder", "netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain", "as plt import torch from torch import nn from torch.autograd import Variable #", "1, loss.item())) def eval(self,x=0): net = self.net.eval() trainx = self.x if type(x)!=int: trainx", "xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval()", "<filename>dimred/models/auto/trainer.py import numpy as np import matplotlib.pyplot as plt import torch from torch", "loss.item())) def eval(self,x=0): net = self.net.eval() trainx = self.x if type(x)!=int: trainx =", "def eval(self,x=0): net = self.net.eval() trainx = self.x if type(x)!=int: trainx = self.process(x,self.tw)", "= self.x if type(x)!=int: trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def", "+ 1, loss.item())) def eval(self,x=0): net = self.net.eval() trainx = self.x if type(x)!=int:", "self.x = self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer =", "trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0]", "rolling_window class Trainer(): def 
__init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x = self.process(x,time_window) self.net", "nn from torch.autograd import Variable # from model import AutoEncoder from utils import", "= net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\")", "self.net_loss=[] for e in range(Nmax): var_x = Variable(self.x) out = self.net(var_x) loss =", "{:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net = self.net.eval() trainx = self.x if", "numpy as np import matplotlib.pyplot as plt import torch from torch import nn", "= self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): # n", "self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) % 50 ==", "self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): #", "Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw = time_window self.x = self.process(x,time_window) self.net = netr", "Variable # from model import AutoEncoder from utils import rolling_window class Trainer(): def", "self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return", "trainx = self.x if type(x)!=int: trainx = self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1]", "Loss: {:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net = self.net.eval() trainx = self.x", "time_window self.x = self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer", "= rolling_window(x,time_window) 
xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in", "= self.process(x,self.tw) xans = net(trainx).detach().cpu().numpy() return xans[:,0,1] def plott(self): self.xi = self.x.detach().numpy()[:,0,0] self.xo", "range(Nmax): var_x = Variable(self.x) out = self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward()", "xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for e in range(Nmax): var_x", "import AutoEncoder from utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw =", "self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) % 50 == 0: print('Epoch:", "0: print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net = self.net.eval()", "= Variable(self.x) out = self.net(var_x) loss = self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item())", "self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion = nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01)", "nn.MSELoss() self.optimizer = torch.optim.Adam(self.net.parameters(), lr=0.01) def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window)", "model import AutoEncoder from utils import rolling_window class Trainer(): def __init__(self,netr,x=None,time_window=10, lr=0.01): self.tw", "plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): # n = plt.plot((self.xi-self.xe)**2,label=\"Error\") plt.title(\"Reconstruction Error\")", "loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) % 50 == 0: print('Epoch: {},", "process(self,x,time_window): xtrain = 
rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[] for", "var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) % 50 == 0:", "= self.x.detach().numpy()[:,0,0] self.xo = self.eval() plt.plot(xi,label=\"Input\") plt.plot(xo,label=\"Reconstruction\") plt.title(\"Train data reconstruction\") plt.xlabel(\"\") plt.show() def", "print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) def eval(self,x=0): net = self.net.eval() trainx", "def process(self,x,time_window): xtrain = rolling_window(x,time_window) xtrain = xtrain.reshape(-1,1,time_window) return torch.from_numpy(xtrain).float() def train(self,Nmax=500): self.net_loss=[]", "self.net_loss.append(loss.item()) if (e + 1) % 50 == 0: print('Epoch: {}, Loss: {:.5f}'.format(e", "data reconstruction\") plt.xlabel(\"\") plt.show() def plote(self): # n = plt.plot((self.xi-self.xe)**2,label=\"Error\") plt.title(\"Reconstruction Error\") plt.show()", "= self.criterion(out, var_x) self.optimizer.zero_grad() loss.backward() self.optimizer.step() self.net_loss.append(loss.item()) if (e + 1) % 50", "self.tw = time_window self.x = self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion =", "from torch.autograd import Variable # from model import AutoEncoder from utils import rolling_window", "import nn from torch.autograd import Variable # from model import AutoEncoder from utils", "= time_window self.x = self.process(x,time_window) self.net = netr # AutoEncoder(time_window) self.criterion = nn.MSELoss()" ]
[ "from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http import HttpForbidden from", "Meta: authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch']", "True include_resource_uri = False queryset = News.objects.all() fields = ['id', 'title', 'content', 'news_date']", "return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource,", "Special view which enables to override the root route /user/ for accessing the", "which enables to override the root route /user/ for accessing the data of", "authenticated user's data. \"\"\" class Meta: authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods", "from news.models import News class NewsResource(ModelResource): \"\"\" Get and update user profile, also", "always_return_data = True include_resource_uri = False queryset = News.objects.all() fields = ['id', 'title',", "retrieving the authenticated user's data. \"\"\" class Meta: authentication = ApiKeyAuthentication() authorization =", "Get and update user profile, also serves as login route for retrieving the", "self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request,", "currently authenticated user and not the listing of all users. 
:param request: :param", "kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None) if not", "route, the root route /user/ is redirected to retrieving the authenticated user's data.", "the root route /user/ for accessing the data of currently authenticated user and", "accessing the data of currently authenticated user and not the listing of all", "= ['patch'] always_return_data = True include_resource_uri = False queryset = News.objects.all() fields =", "= ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data =", "name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\"", "url from django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization", "= ['get'] detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri = False queryset =", ":param request: :param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user',", "] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special", "and update user profile, also serves as login route for retrieving the ApiKey.", "authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data", "'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self,", "is redirected to retrieving the 
authenticated user's data. \"\"\" class Meta: authentication =", ":param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None) if", "ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data = True", "ModelResource from news.models import News class NewsResource(ModelResource): \"\"\" Get and update user profile,", "tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http import HttpForbidden from tastypie.resources", "getattr(request, 'user', None) if not user or user.is_anonymous(): return HttpForbidden() News.objects.get(pk=int(kwargs['pk'])).user.add(user) return HttpResponse(status=200)", "super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special view which enables to override", "HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http import HttpForbidden", "import url from django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import", "enables to override the root route /user/ for accessing the data of currently", "to override the root route /user/ for accessing the data of currently authenticated", "data of currently authenticated user and not the listing of all users. :param", "import Authorization from tastypie.http import HttpForbidden from tastypie.resources import ModelResource from news.models import", "return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special view which enables to", "listing of all users. 
:param request: :param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request)", "retrieving the ApiKey. This resource doesn't have any listing route, the root route", "\"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None) if not user or", "from django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from", "all users. :param request: :param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user =", "'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def", "\"\"\" Special view which enables to override the root route /user/ for accessing", "prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return", "override the root route /user/ for accessing the data of currently authenticated user", "user and not the listing of all users. :param request: :param kwargs: :return:", "\"\"\" Get and update user profile, also serves as login route for retrieving", "login route for retrieving the ApiKey. This resource doesn't have any listing route,", "= Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri =", "users. :param request: :param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request,", "for accessing the data of currently authenticated user and not the listing of", "/user/ is redirected to retrieving the authenticated user's data. 
\"\"\" class Meta: authentication", "as login route for retrieving the ApiKey. This resource doesn't have any listing", "redirected to retrieving the authenticated user's data. \"\"\" class Meta: authentication = ApiKeyAuthentication()", "and not the listing of all users. :param request: :param kwargs: :return: \"\"\"", "= True include_resource_uri = False queryset = News.objects.all() fields = ['id', 'title', 'content',", "request, **kwargs): \"\"\" Special view which enables to override the root route /user/", "News class NewsResource(ModelResource): \"\"\" Get and update user profile, also serves as login", "request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special view which enables", "self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None) if not user or user.is_anonymous():", "route /user/ for accessing the data of currently authenticated user and not the", "import News class NewsResource(ModelResource): \"\"\" Get and update user profile, also serves as", "for retrieving the ApiKey. 
This resource doesn't have any listing route, the root", "% self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self,", "view which enables to override the root route /user/ for accessing the data", "allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None) if not user or user.is_anonymous(): return", "<reponame>nicbou/markdown-notes from django.conf.urls import url from django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication", "Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri = False", "'title', 'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ]", "fields = ['id', 'title', 'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name,", "/user/ for accessing the data of currently authenticated user and not the listing", "authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri", "to retrieving the authenticated user's data. \"\"\" class Meta: authentication = ApiKeyAuthentication() authorization", "the data of currently authenticated user and not the listing of all users.", "doesn't have any listing route, the root route /user/ is redirected to retrieving", "the authenticated user's data. 
\"\"\" class Meta: authentication = ApiKeyAuthentication() authorization = Authorization()", "News.objects.all() fields = ['id', 'title', 'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" %", "of all users. :param request: :param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user", "django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http", "profile, also serves as login route for retrieving the ApiKey. This resource doesn't", "from tastypie.resources import ModelResource from news.models import News class NewsResource(ModelResource): \"\"\" Get and", "news.models import News class NewsResource(ModelResource): \"\"\" Get and update user profile, also serves", "from tastypie.authorization import Authorization from tastypie.http import HttpForbidden from tastypie.resources import ModelResource from", "route /user/ is redirected to retrieving the authenticated user's data. \"\"\" class Meta:", "= News.objects.all() fields = ['id', 'title', 'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\"", "= False queryset = News.objects.all() fields = ['id', 'title', 'content', 'news_date'] def prepend_urls(self):", "['patch'] always_return_data = True include_resource_uri = False queryset = News.objects.all() fields = ['id',", "['id', 'title', 'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"),", "the ApiKey. 
This resource doesn't have any listing route, the root route /user/", "HttpForbidden from tastypie.resources import ModelResource from news.models import News class NewsResource(ModelResource): \"\"\" Get", "self.is_authenticated(request) user = getattr(request, 'user', None) if not user or user.is_anonymous(): return HttpForbidden()", "root route /user/ is redirected to retrieving the authenticated user's data. \"\"\" class", "['get'] detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri = False queryset = News.objects.all()", "resource doesn't have any listing route, the root route /user/ is redirected to", "import ModelResource from news.models import News class NewsResource(ModelResource): \"\"\" Get and update user", "root route /user/ for accessing the data of currently authenticated user and not", "self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs):", "authenticated user and not the listing of all users. :param request: :param kwargs:", "ApiKey. This resource doesn't have any listing route, the root route /user/ is", "detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri = False queryset = News.objects.all() fields", "tastypie.resources import ModelResource from news.models import News class NewsResource(ModelResource): \"\"\" Get and update", "the listing of all users. :param request: :param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch'])", "data. 
\"\"\" class Meta: authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get']", "\"\"\" class Meta: authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods", "import HttpForbidden from tastypie.resources import ModelResource from news.models import News class NewsResource(ModelResource): \"\"\"", "any listing route, the root route /user/ is redirected to retrieving the authenticated", "update user profile, also serves as login route for retrieving the ApiKey. This", "queryset = News.objects.all() fields = ['id', 'title', 'content', 'news_date'] def prepend_urls(self): return [", "have any listing route, the root route /user/ is redirected to retrieving the", "user profile, also serves as login route for retrieving the ApiKey. This resource", "= ['id', 'title', 'content', 'news_date'] def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'),", "**kwargs): \"\"\" Special view which enables to override the root route /user/ for", "import ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http import HttpForbidden from tastypie.resources import", "tastypie.http import HttpForbidden from tastypie.resources import ModelResource from news.models import News class NewsResource(ModelResource):", "This resource doesn't have any listing route, the root route /user/ is redirected", "include_resource_uri = False queryset = News.objects.all() fields = ['id', 'title', 'content', 'news_date'] def", "def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special view", "ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http import HttpForbidden from tastypie.resources import ModelResource", "request: 
:param kwargs: :return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None)", "mark_news_read(self, request, **kwargs): \"\"\" Special view which enables to override the root route", "from tastypie.http import HttpForbidden from tastypie.resources import ModelResource from news.models import News class", "user = getattr(request, 'user', None) if not user or user.is_anonymous(): return HttpForbidden() News.objects.get(pk=int(kwargs['pk'])).user.add(user)", "import HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from tastypie.http import", "listing route, the root route /user/ is redirected to retrieving the authenticated user's", "def prepend_urls(self): return [ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request):", "class Meta: authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods = ['get'] detail_allowed_methods =", "False queryset = News.objects.all() fields = ['id', 'title', 'content', 'news_date'] def prepend_urls(self): return", "[ url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user)", "from django.conf.urls import url from django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication from", "django.conf.urls import url from django.http import HttpResponse from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization", "= getattr(request, 'user', None) if not user or user.is_anonymous(): return HttpForbidden() News.objects.get(pk=int(kwargs['pk'])).user.add(user) return", "def mark_news_read(self, 
request, **kwargs): \"\"\" Special view which enables to override the root", "the root route /user/ is redirected to retrieving the authenticated user's data. \"\"\"", "also serves as login route for retrieving the ApiKey. This resource doesn't have", "of currently authenticated user and not the listing of all users. :param request:", "route for retrieving the ApiKey. This resource doesn't have any listing route, the", ":return: \"\"\" self.method_check(request, allowed=['patch']) self.is_authenticated(request) user = getattr(request, 'user', None) if not user", "Authorization from tastypie.http import HttpForbidden from tastypie.resources import ModelResource from news.models import News", "NewsResource(ModelResource): \"\"\" Get and update user profile, also serves as login route for", "self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special view which enables to override the", "serves as login route for retrieving the ApiKey. This resource doesn't have any", "class NewsResource(ModelResource): \"\"\" Get and update user profile, also serves as login route", "get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def mark_news_read(self, request, **kwargs): \"\"\" Special view which", "user's data. \"\"\" class Meta: authentication = ApiKeyAuthentication() authorization = Authorization() list_allowed_methods =", "list_allowed_methods = ['get'] detail_allowed_methods = ['patch'] always_return_data = True include_resource_uri = False queryset", "url(r\"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$\" % self._meta.resource_name, self.wrap_view('mark_news_read'), name=\"api_mark_news_read\"), ] def get_object_list(self, request): return super(NewsResource, self).get_object_list(request).exclude(user=request.user) def", "not the listing of all users. 
:param request: :param kwargs: :return: \"\"\" self.method_check(request,", "tastypie.authorization import Authorization from tastypie.http import HttpForbidden from tastypie.resources import ModelResource from news.models" ]
[ "ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus", "ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode", "[ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(),", "SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend", "import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import", "ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(),", "ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock", "SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd", "apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(),", "SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent", "from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(),", "ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs", "from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from", "import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import", "from 
SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from", "SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme", "import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import", "from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from", "ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(),", "from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from", "ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave", "from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from", "ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(), ApiRecent(), ApiAllfor(), ApiAddFriend(),", "from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from", "ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(),", "ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss", "= [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(),", "SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase 
apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(),", "SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer", "<filename>SHIMON/api/api_calls.py<gh_stars>0 from SHIMON.api.external import api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202,", "SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status", "import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg", "ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends", "from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from", "import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import", "ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(),", "SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode", "SHIMON.api.external import api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from", "SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key", "SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base", "import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import", "SHIMON.api.devmode 
import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css", "ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor", "import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import", "ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(), ApiRecent(), ApiAllfor(),", "ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(),", "SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save", "from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from", "import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import", "error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg", "import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import", "ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke", "from SHIMON.api.external import api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400", "import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import", "import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import", 
"SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls =", "ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(),", "SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(),", "SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock", "api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock", "ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(),", "from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from", "SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import", "import api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock", "import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [", "api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from", "ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase", "import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import", "SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing 
from SHIMON.api.friends", "from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from", "import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import", "from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from", "ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey", "SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy", "from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from", "from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from", "error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg", "from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from", "from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg", "SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js", "from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from", "import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import", "from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(),", "error_202, error_400 
from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import", "ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer", "from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from", "import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import", "SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke", "ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd", "from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from", "SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor", "ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(),", "ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend", "import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import", "api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import", "import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import", "import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import", "from 
SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls", "error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from", "import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(),", "from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from", "SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping", "ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme", "ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing", "import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import", "ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy", "ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(), ApiRecent(), ApiAllfor(), ApiAddFriend(), ]", "ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent", "import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(),", "ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(), ApiRecent()," ]
[ "and 3 columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try: assert", "division from __future__ import print_function import shutil import sys import tempfile from observations.r.sparrows", "module sparrows.py by downloading sparrows.csv and testing shape of extracted data has 116", "columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try: assert x_train.shape ==", "\"\"\"Test module sparrows.py by downloading sparrows.csv and testing shape of extracted data has", "import absolute_import from __future__ import division from __future__ import print_function import shutil import", "shape of extracted data has 116 rows and 3 columns \"\"\" test_path =", "sparrows def test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv and testing shape of", "x_train, metadata = sparrows(test_path) try: assert x_train.shape == (116, 3) except: shutil.rmtree(test_path) raise()", "tempfile from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv", "def test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv and testing shape of extracted", "= tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try: assert x_train.shape == (116, 3) except:", "import shutil import sys import tempfile from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test", "shutil import sys import tempfile from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module", "test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv and testing shape of extracted data", "from __future__ import print_function import shutil import sys import tempfile from observations.r.sparrows import", "from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv and", "absolute_import from __future__ import division from __future__ 
import print_function import shutil import sys", "testing shape of extracted data has 116 rows and 3 columns \"\"\" test_path", "print_function import shutil import sys import tempfile from observations.r.sparrows import sparrows def test_sparrows():", "sys import tempfile from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module sparrows.py by", "from __future__ import absolute_import from __future__ import division from __future__ import print_function import", "rows and 3 columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try:", "__future__ import division from __future__ import print_function import shutil import sys import tempfile", "sparrows.py by downloading sparrows.csv and testing shape of extracted data has 116 rows", "import print_function import shutil import sys import tempfile from observations.r.sparrows import sparrows def", "\"\"\" test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try: assert x_train.shape == (116,", "of extracted data has 116 rows and 3 columns \"\"\" test_path = tempfile.mkdtemp()", "test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try: assert x_train.shape == (116, 3)", "import tempfile from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module sparrows.py by downloading", "downloading sparrows.csv and testing shape of extracted data has 116 rows and 3", "sparrows.csv and testing shape of extracted data has 116 rows and 3 columns", "and testing shape of extracted data has 116 rows and 3 columns \"\"\"", "__future__ import print_function import shutil import sys import tempfile from observations.r.sparrows import sparrows", "__future__ import absolute_import from __future__ import division from __future__ import print_function import shutil", "3 columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path) try: assert x_train.shape", "tempfile.mkdtemp() x_train, metadata = 
sparrows(test_path) try: assert x_train.shape == (116, 3) except: shutil.rmtree(test_path)", "from __future__ import division from __future__ import print_function import shutil import sys import", "has 116 rows and 3 columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata =", "data has 116 rows and 3 columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata", "extracted data has 116 rows and 3 columns \"\"\" test_path = tempfile.mkdtemp() x_train,", "import sys import tempfile from observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module sparrows.py", "import sparrows def test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv and testing shape", "by downloading sparrows.csv and testing shape of extracted data has 116 rows and", "import division from __future__ import print_function import shutil import sys import tempfile from", "observations.r.sparrows import sparrows def test_sparrows(): \"\"\"Test module sparrows.py by downloading sparrows.csv and testing", "116 rows and 3 columns \"\"\" test_path = tempfile.mkdtemp() x_train, metadata = sparrows(test_path)" ]
[ "<gh_stars>1-10 from django.contrib import admin def raw_id_fields_admin(*args): fields = args class cls(admin.ModelAdmin): raw_id_fields", "admin def raw_id_fields_admin(*args): fields = args class cls(admin.ModelAdmin): raw_id_fields = fields return cls", "from django.contrib import admin def raw_id_fields_admin(*args): fields = args class cls(admin.ModelAdmin): raw_id_fields =", "django.contrib import admin def raw_id_fields_admin(*args): fields = args class cls(admin.ModelAdmin): raw_id_fields = fields", "import admin def raw_id_fields_admin(*args): fields = args class cls(admin.ModelAdmin): raw_id_fields = fields return" ]
[ "for x in p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id, sampled, baggage)", "from .context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided span", "extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is None: return None try:", "Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context is None: return if", "= dict((x.strip() for x in p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id,", "context instance does is not a W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A", "span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else \"-00\")) if span_context.baggage", "is not None: baggage = dict((x.strip() for x in p.split(\"=\")) for p in", "not None: carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for (k, v) in span_context.baggage.items()) def", "bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version != \"00\" or sampled not in (\"00\",", "carrier.get(\"trace-parent\", None) if trace_parent is None: return None try: (version, trace_id, span_id, sampled)", "(\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else \"-00\"))", "trace_state = carrier.get(\"trace-state\", None) if trace_state is not None: baggage = dict((x.strip() for", "None) if trace_parent is None: return None try: (version, trace_id, span_id, sampled) =", "!= \"00\" or sampled not in (\"00\", \"01\") or len(trace_id) != 16 or", "= (\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else", "import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided span context 
instance", "baggage = None trace_state = carrier.get(\"trace-state\", None) if trace_state is not None: baggage", "\"\"\"A W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if", "carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled", "span_context.sampled else \"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"] = \",\".join(k+\"=\" + v", "trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version", "sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version != \"00\"", "for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context is None: return if not", "Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context is", "span_id = bytes.fromhex(span_id) if version != \"00\" or sampled not in (\"00\", \"01\")", "\"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for (k,", "dict((x.strip() for x in p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id, sampled,", "carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is None: return None try: (version,", "None: baggage = dict((x.strip() for x in p.split(\"=\")) for p in trace_state.split(\",\")) return", "None) if trace_state is not None: baggage = dict((x.strip() for x in p.split(\"=\"))", "trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version != \"00\" or sampled", "if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\"", "import SpanContextCorruptedException from 
.context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the", "\"01\") or len(trace_id) != 16 or len(span_id) != 8: raise SpanContextCorruptedException() sampled =", "instance does is not a W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C", "not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\" +", "raise SpanContextCorruptedException() sampled = (sampled == \"01\") baggage = None trace_state = carrier.get(\"trace-state\",", "== \"01\") baggage = None trace_state = carrier.get(\"trace-state\", None) if trace_state is not", "not None: baggage = dict((x.strip() for x in p.split(\"=\")) for p in trace_state.split(\",\"))", "16 or len(span_id) != 8: raise SpanContextCorruptedException() sampled = (sampled == \"01\") baggage", "trace_state is not None: baggage = dict((x.strip() for x in p.split(\"=\")) for p", "\"00\" or sampled not in (\"00\", \"01\") or len(trace_id) != 16 or len(span_id)", "span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\"", "\"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else \"-00\")) if span_context.baggage is not", "!= 8: raise SpanContextCorruptedException() sampled = (sampled == \"01\") baggage = None trace_state", "if trace_state is not None: baggage = dict((x.strip() for x in p.split(\"=\")) for", "or sampled not in (\"00\", \"01\") or len(trace_id) != 16 or len(span_id) !=", "used when the provided span context instance does is not a W3C span", "TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier):", "a W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator", "span_context.baggage is not None: 
carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for (k, v) in", "in p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id, sampled, baggage) except ValueError:", "sampled = (sampled == \"01\") baggage = None trace_state = carrier.get(\"trace-state\", None) if", "context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def", "= bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version != \"00\" or sampled not in", "or len(trace_id) != 16 or len(span_id) != 8: raise SpanContextCorruptedException() sampled = (sampled", "SpanContextCorruptedException() sampled = (sampled == \"01\") baggage = None trace_state = carrier.get(\"trace-state\", None)", "= trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version != \"00\" or", "+ v for (k, v) in span_context.baggage.items()) def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\",", "SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided span context instance does", "None: return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper()", "(\"-01\" if span_context.sampled else \"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"] = \",\".join(k+\"=\"", "= \",\".join(k+\"=\" + v for (k, v) in span_context.baggage.items()) def extract(self, carrier): trace_parent", "= carrier.get(\"trace-state\", None) if trace_state is not None: baggage = dict((x.strip() for x", "span_context, carrier): if span_context is None: return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException()", "W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator for", "(k, v) in span_context.baggage.items()) def extract(self, 
carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent", "pass class TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self,", "trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is None: return None try: (version, trace_id,", "for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id, sampled, baggage) except ValueError: raise SpanContextCorruptedException()", "\"\"\"InvalidSpanContextException is used when the provided span context instance does is not a", "does is not a W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace", "\"01\") baggage = None trace_state = carrier.get(\"trace-state\", None) if trace_state is not None:", "or len(span_id) != 8: raise SpanContextCorruptedException() sampled = (sampled == \"01\") baggage =", "the provided span context instance does is not a W3C span context.\"\"\" pass", "trace_parent is None: return None try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id", "not in (\"00\", \"01\") or len(trace_id) != 16 or len(span_id) != 8: raise", "span_context is None: return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\"", "compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context is None: return", "= carrier.get(\"trace-parent\", None) if trace_parent is None: return None try: (version, trace_id, span_id,", "len(span_id) != 8: raise SpanContextCorruptedException() sampled = (sampled == \"01\") baggage = None", "def inject(self, span_context, carrier): if span_context is None: return if not isinstance(span_context, SpanContext):", "baggage = dict((x.strip() for x in p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id,", "span context instance does is not a W3C span context.\"\"\" pass class TextPropagator(object):", "is not None: 
carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for (k, v) in span_context.baggage.items())", "is not a W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace Context", "InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided span context instance does is not", "None: carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for (k, v) in span_context.baggage.items()) def extract(self,", "if span_context.baggage is not None: carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for (k, v)", "sampled not in (\"00\", \"01\") or len(trace_id) != 16 or len(span_id) != 8:", "= None trace_state = carrier.get(\"trace-state\", None) if trace_state is not None: baggage =", "span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version !=", "= (sampled == \"01\") baggage = None trace_state = carrier.get(\"trace-state\", None) if trace_state", "when the provided span context instance does is not a W3C span context.\"\"\"", "\",\".join(k+\"=\" + v for (k, v) in span_context.baggage.items()) def extract(self, carrier): trace_parent =", "+ span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else \"-00\")) if span_context.baggage is not None:", "None: return None try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id)", "W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context", "(version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if", "try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id)", "+ \"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else \"-00\")) if span_context.baggage is", "carrier[\"trace-state\"] = \",\".join(k+\"=\" + v 
for (k, v) in span_context.baggage.items()) def extract(self, carrier):", "len(trace_id) != 16 or len(span_id) != 8: raise SpanContextCorruptedException() sampled = (sampled ==", "if trace_parent is None: return None try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\")", "raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\"", "class TextPropagator(object): \"\"\"A W3C Trace Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context,", "8: raise SpanContextCorruptedException() sampled = (sampled == \"01\") baggage = None trace_state =", "for (k, v) in span_context.baggage.items()) def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None) if", "v for (k, v) in span_context.baggage.items()) def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None)", "SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() +", "opentracing import SpanContextCorruptedException from .context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when", "isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper()", "if version != \"00\" or sampled not in (\"00\", \"01\") or len(trace_id) !=", "version != \"00\" or sampled not in (\"00\", \"01\") or len(trace_id) != 16", "return None try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id", "x in p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id, sampled, baggage) except", "span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else 
\"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"]", "+ span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\" if span_context.sampled else \"-00\")) if", "in span_context.baggage.items()) def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is None:", "Context compatible Propagator for Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context is None:", "if span_context is None: return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] =", "is None: return None try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id =", "provided span context instance does is not a W3C span context.\"\"\" pass class", "if span_context.sampled else \"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"] = \",\".join(k+\"=\" +", "(\"00\", \"01\") or len(trace_id) != 16 or len(span_id) != 8: raise SpanContextCorruptedException() sampled", "None trace_state = carrier.get(\"trace-state\", None) if trace_state is not None: baggage = dict((x.strip()", "else \"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"] = \",\".join(k+\"=\" + v for", "carrier): if span_context is None: return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"]", "is None: return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" +", "p.split(\"=\")) for p in trace_state.split(\",\")) return SpanContext(trace_id, span_id, sampled, baggage) except ValueError: raise", "None try: (version, trace_id, span_id, sampled) = trace_parent.split(\"-\") trace_id = bytes.fromhex(trace_id) span_id =", "!= 16 or len(span_id) != 8: raise SpanContextCorruptedException() sampled = (sampled == \"01\")", "<filename>w3copentracing/text_propagator.py from opentracing import 
SpanContextCorruptedException from .context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is", "v) in span_context.baggage.items()) def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is", "bytes.fromhex(span_id) if version != \"00\" or sampled not in (\"00\", \"01\") or len(trace_id)", "in (\"00\", \"01\") or len(trace_id) != 16 or len(span_id) != 8: raise SpanContextCorruptedException()", "trace_id = bytes.fromhex(trace_id) span_id = bytes.fromhex(span_id) if version != \"00\" or sampled not", "inject(self, span_context, carrier): if span_context is None: return if not isinstance(span_context, SpanContext): raise", "SpanContextCorruptedException from .context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided", "class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided span context instance does is", "def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is None: return None", "= bytes.fromhex(span_id) if version != \"00\" or sampled not in (\"00\", \"01\") or", "carrier.get(\"trace-state\", None) if trace_state is not None: baggage = dict((x.strip() for x in", "(sampled == \"01\") baggage = None trace_state = carrier.get(\"trace-state\", None) if trace_state is", "+ (\"-01\" if span_context.sampled else \"-00\")) if span_context.baggage is not None: carrier[\"trace-state\"] =", "is used when the provided span context instance does is not a W3C", "from opentracing import SpanContextCorruptedException from .context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used", "return if not isinstance(span_context, SpanContext): raise InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() +", 
"Format.TEXT_MAP.\"\"\" def inject(self, span_context, carrier): if span_context is None: return if not isinstance(span_context,", ".context import SpanContext class InvalidSpanContextException(Exception): \"\"\"InvalidSpanContextException is used when the provided span context", "span_context.baggage.items()) def extract(self, carrier): trace_parent = carrier.get(\"trace-parent\", None) if trace_parent is None: return", "InvalidSpanContextException() carrier[\"trace-parent\"] = (\"00-\" + span_context.trace_id.hex().upper() + \"-\" + span_context.span_id.hex().upper() + (\"-01\" if", "not a W3C span context.\"\"\" pass class TextPropagator(object): \"\"\"A W3C Trace Context compatible" ]
[ "coding: utf-8 -*- ''' @Time : 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL>", "2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from", "''' from . import list from . import video from . import file", "@FileName: __init__.py @Software: PyCharm ''' from . import list from . import video", "@Email : <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from . import list from", "# -*- coding: utf-8 -*- ''' @Time : 2021/08/08 @Author : Yanyuxiang @Email", "-*- ''' @Time : 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py", "-*- coding: utf-8 -*- ''' @Time : 2021/08/08 @Author : Yanyuxiang @Email :", "<EMAIL> @FileName: __init__.py @Software: PyCharm ''' from . import list from . import", "<reponame>yanyuxiangToday/yyxutils # -*- coding: utf-8 -*- ''' @Time : 2021/08/08 @Author : Yanyuxiang", ": <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from . import list from .", "from . import list from . import video from . import file from", "@Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from .", "Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from . import list", "list from . import video from . import file from .utils import *", "@Time : 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm", "utf-8 -*- ''' @Time : 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName:", ": Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from . import", ". import list from . import video from . import file from .utils", "__init__.py @Software: PyCharm ''' from . import list from . import video from", "@Software: PyCharm ''' from . import list from . import video from .", "PyCharm ''' from . import list from . import video from . import", "import list from . import video from . 
import file from .utils import", ": 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm '''", "''' @Time : 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software:" ]
[ "Topology from autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool:", "host in hosts: peers = host.peer_nodes() if not any(n.type == DeviceType.ROUTER for n", "bool: \"\"\" @param topology: @return: \"\"\" valid = True components = connected_components(topology) if", "filters.routers(topology) routers_present = len(routers) > 0 if routers_present: # check that all hosts", "a router hosts = filters.hosts(topology) for host in hosts: peers = host.peer_nodes() if", "= filters.routers(topology) routers_present = len(routers) > 0 if routers_present: # check that all", "%s is not connected to a router\", host.label) valid = False return valid", "autonetkit.design.utils import filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types", "\"\"\" valid = True components = connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network:", "= logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology: @return: \"\"\" valid", "import logging from autonetkit.design.utils import filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import", "from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology:", "Topology) -> bool: \"\"\" @param topology: @return: \"\"\" valid = True components =", "to a router hosts = filters.hosts(topology) for host in hosts: peers = host.peer_nodes()", "routers_present = len(routers) > 0 if routers_present: # check that all hosts connect", "if routers_present: # check that all hosts connect to a router hosts =", "> 1: logger.warning(\"Disconnected network: %s components\", len(components)) valid = False routers = 
filters.routers(topology)", "True components = connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network: %s components\", len(components))", "for n in peers): logger.warning(\"Host %s is not connected to a router\", host.label)", "%s components\", len(components)) valid = False routers = filters.routers(topology) routers_present = len(routers) >", "<filename>autonetkit/design/validation/validation.py import logging from autonetkit.design.utils import filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology", "components = connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network: %s components\", len(components)) valid", "= connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network: %s components\", len(components)) valid =", "autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType logger =", "# check that all hosts connect to a router hosts = filters.hosts(topology) for", "import connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__)", "autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology)", "from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType logger", "if not any(n.type == DeviceType.ROUTER for n in peers): logger.warning(\"Host %s is not", "in peers): logger.warning(\"Host %s is not connected to a router\", host.label) valid =", "len(components)) valid = False routers = filters.routers(topology) routers_present = len(routers) 
> 0 if", "components\", len(components)) valid = False routers = filters.routers(topology) routers_present = len(routers) > 0", "router hosts = filters.hosts(topology) for host in hosts: peers = host.peer_nodes() if not", "that all hosts connect to a router hosts = filters.hosts(topology) for host in", "= host.peer_nodes() if not any(n.type == DeviceType.ROUTER for n in peers): logger.warning(\"Host %s", "hosts connect to a router hosts = filters.hosts(topology) for host in hosts: peers", "= filters.hosts(topology) for host in hosts: peers = host.peer_nodes() if not any(n.type ==", "logger.warning(\"Host %s is not connected to a router\", host.label) valid = False return", "autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param", "len(routers) > 0 if routers_present: # check that all hosts connect to a", "logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology: @return: \"\"\" valid =", "import filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import", "routers_present: # check that all hosts connect to a router hosts = filters.hosts(topology)", "peers = host.peer_nodes() if not any(n.type == DeviceType.ROUTER for n in peers): logger.warning(\"Host", "import DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology:", "== DeviceType.ROUTER for n in peers): logger.warning(\"Host %s is not connected to a", "for host in hosts: peers = host.peer_nodes() if not any(n.type == DeviceType.ROUTER for", "topology: @return: \"\"\" valid = True components = connected_components(topology) if len(components) > 1:", "any(n.type == DeviceType.ROUTER for n in peers): logger.warning(\"Host %s is not connected to", "1: logger.warning(\"Disconnected network: %s 
components\", len(components)) valid = False routers = filters.routers(topology) routers_present", "host.peer_nodes() if not any(n.type == DeviceType.ROUTER for n in peers): logger.warning(\"Host %s is", "connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__) def", "all hosts connect to a router hosts = filters.hosts(topology) for host in hosts:", "valid = True components = connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network: %s", "hosts: peers = host.peer_nodes() if not any(n.type == DeviceType.ROUTER for n in peers):", "logging from autonetkit.design.utils import filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology", "len(components) > 1: logger.warning(\"Disconnected network: %s components\", len(components)) valid = False routers =", "= True components = connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network: %s components\",", "logger.warning(\"Disconnected network: %s components\", len(components)) valid = False routers = filters.routers(topology) routers_present =", "\"\"\" @param topology: @return: \"\"\" valid = True components = connected_components(topology) if len(components)", "network: %s components\", len(components)) valid = False routers = filters.routers(topology) routers_present = len(routers)", "DeviceType.ROUTER for n in peers): logger.warning(\"Host %s is not connected to a router\",", "routers = filters.routers(topology) routers_present = len(routers) > 0 if routers_present: # check that", "= False routers = filters.routers(topology) routers_present = len(routers) > 0 if routers_present: #", "if len(components) > 1: logger.warning(\"Disconnected network: %s components\", len(components)) valid = False routers", "from autonetkit.network_model.types import DeviceType logger = 
logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\"", "peers): logger.warning(\"Host %s is not connected to a router\", host.label) valid = False", "from autonetkit.design.utils import filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology from", "import Topology from autonetkit.network_model.types import DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology) ->", "@return: \"\"\" valid = True components = connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected", "check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology: @return: \"\"\" valid = True components", "connect to a router hosts = filters.hosts(topology) for host in hosts: peers =", "False routers = filters.routers(topology) routers_present = len(routers) > 0 if routers_present: # check", "= len(routers) > 0 if routers_present: # check that all hosts connect to", "filters from autonetkit.design.utils.graph_utils import connected_components from autonetkit.network_model.topology import Topology from autonetkit.network_model.types import DeviceType", "@param topology: @return: \"\"\" valid = True components = connected_components(topology) if len(components) >", "logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology: @return: \"\"\"", "DeviceType logger = logging.getLogger(__name__) def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology: @return:", "> 0 if routers_present: # check that all hosts connect to a router", "n in peers): logger.warning(\"Host %s is not connected to a router\", host.label) valid", "def check_layer2_conn(topology: Topology) -> bool: \"\"\" @param topology: @return: \"\"\" valid = True", "not any(n.type == DeviceType.ROUTER for n in peers): logger.warning(\"Host %s is not connected", "0 if routers_present: # check that all 
hosts connect to a router hosts", "hosts = filters.hosts(topology) for host in hosts: peers = host.peer_nodes() if not any(n.type", "-> bool: \"\"\" @param topology: @return: \"\"\" valid = True components = connected_components(topology)", "valid = False routers = filters.routers(topology) routers_present = len(routers) > 0 if routers_present:", "filters.hosts(topology) for host in hosts: peers = host.peer_nodes() if not any(n.type == DeviceType.ROUTER", "in hosts: peers = host.peer_nodes() if not any(n.type == DeviceType.ROUTER for n in", "connected_components(topology) if len(components) > 1: logger.warning(\"Disconnected network: %s components\", len(components)) valid = False", "check that all hosts connect to a router hosts = filters.hosts(topology) for host" ]
[ "'103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']},", "'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE',", "'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309',", "('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ]", "('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52',", "['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate':", "('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate': 3,", "'22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, 
'84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES =", "'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of':", "'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of':", "'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [", "[ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4',", "'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'),", "['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']},", "[ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, 
'103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type':", "('expt1', {'sample_id': 'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3',", "'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate': 3, 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a')", "'84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0,", "('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE',", "= [ ('expt1', {'sample_id': 'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']},", "'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id':", "'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id':", "= [ ('expt1', {'sample_id': 
'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG',", "['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of':", "CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42',", "'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'),", "{'sample_id': 'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id':", "{'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [ ('expt1',", "{'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES", "['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': 
['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES", "'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate': 3, 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']},", "CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2',", "'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']},", "'103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a')", "'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of':", "['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41',", "('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4',", "'standard_type':'BEAD_FLUORESCENCE', 'child_of': 
['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5',", "0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3',", "'1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'),", "{'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type':", "{'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN", "'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51',", "('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': 
['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ]", "'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES =", "UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id':", "'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES", "'103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate':", "[ ('expt1', {'sample_id': 'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'),", "'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN =", "'596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': 
['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337')", "['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']},", "{'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']},", "{'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id':", "'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000',", "{'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of':", "'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC',", "('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [", "'1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 
'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id':", "= [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id':", "DELETES = CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']},", "['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309',", "'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5',", "= CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'),", "{'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate': 3, 'child_of':", "('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of':", "] DELETES = CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 
'child_of':", "'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [ ('expt1', {'sample_id':", "] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'),", "'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate': 3, 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ]", "CREATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of':", "['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}," ]
[ "goes! Will use cross validation metrics here, nothing too fancy ''' # Make", "from sklearn.model_selection import cross_val_score, KFold, train_test_split # Define url and columns url =", "Improve Accuracy with Ensemble Methods #################################### ''' Here in the course would have", "Soft Vote with weights # Some models will be more valuable than others", "(+\\-%.3f%%)\" % (scoring, name, result.mean() * 100.0, result.std() * 100.0)) else: print(\"\\n%s of", "['Voting Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting", "https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # # Project will soon be found at:", "ensemble method I haven't tried: The Voting Classifier This method involves literally combining", "= 1))], voting = 'soft')] # Number 3: Soft Vote with weights #", "3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting", "https://www.inertia7.com/projects/ #################################### # Welcome to my repo for the Mastering Machine Learning Python", "read_csv(url, names = columns) array = data.values # Divide data into attributes and", "models: k_fold = KFold(n_splits = 10, random_state = 1) for scoring in ('accuracy',", "I will be going through each part of the course # So you", "of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean() * 100.0, result.std() *", "AttributeError: print(\"The %s model cannot perform cross validation with the %s metric\" %", "with Ensemble Methods #################################### ''' Here in the course would have been a", "('lr', LogisticRegression(random_state = 1))], voting = 'soft')] # Number 3: Soft Vote with", "the course # So you can get a feel of the different parts", "except AttributeError: print(\"The %s model cannot perform 
cross validation with the %s metric\"", "[ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] #", "('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting = 'soft')] # Number 3: Soft", "# By <NAME> # # Project will soon be found at: # #", "# Lesson 11: Improve Accuracy with Ensemble Methods #################################### ''' Here in the", "Soft Vote (Argmax of sums of predicted probabilities used) # Recommended for ensemble", "[ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights", "dtype = object) # Voting ensembles # Number 1: Hard Vote (Predicted class", "the Mastering Machine Learning Python Mini Course # Here I will be going", "print(\"The %s model cannot perform cross validation with the %s metric\" % (name,", "2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting = 'soft')]", "to my repo for the Mastering Machine Learning Python Mini Course # Here", "0:8] y = array[:, 8] #################################### # Lesson 11: Improve Accuracy with Ensemble", "here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # # Project will soon", "models will be more valuable than others models[2] = ['Voting Classifier 3', VotingClassifier(estimators", "'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']) #", "Boosted Machine) Here I will try out a bunch of different things and", "# https://www.inertia7.com/projects/ #################################### # Welcome to my repo for the Mastering Machine Learning", "Mini Course # Here I will be going through each part of the", "cv = k_fold, scoring = scoring) if scoring == 'accuracy': print(\"\\n%s of %s", "probabilities used) 
# Recommended for ensemble of well-calibrated classifiers models[1] = ['Voting Classifier", "use cross validation metrics here, nothing too fancy ''' # Make list for", "for models models = np.empty([3, 2], dtype = object) # Voting ensembles #", "= 10, random_state = 1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result", "# Project will soon be found at: # # https://www.inertia7.com/projects/ #################################### # Welcome", "'soft')] # Number 3: Soft Vote with weights # Some models will be", "import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score, KFold, train_test_split", "= [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting = 'soft')] # Number", "'mass', 'pedi', 'age', 'class']) # Read in data data = read_csv(url, names =", "in a Random Forest or Gradient Boosted Machine) Here I will try out", "Classifier 3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),],", "for name, model in models: k_fold = KFold(n_splits = 10, random_state = 1)", "same models (many Decision Trees in a Random Forest or Gradient Boosted Machine)", "''' # Make list for models models = np.empty([3, 2], dtype = object)", "np.empty([3, 2], dtype = object) # Voting ensembles # Number 1: Hard Vote", "* 100.0, result.std() * 100.0)) else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" %", "Mastering Machine Learning Python Mini Course # Here I will be going through", "models models = np.empty([3, 2], dtype = object) # Voting ensembles # Number", "for ensemble of well-calibrated classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [", "Logsitic Regression + Decision Tree) versus many of the same models (many Decision", "sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from 
sklearn.model_selection import cross_val_score, KFold, train_test_split # Define url", "evaluate for name, model in models: k_fold = KFold(n_splits = 10, random_state =", "if scoring == 'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name,", "columns) array = data.values # Divide data into attributes and predictor X =", "model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except AttributeError: print(\"The %s model", "course # So you can get a feel of the different parts import", "array = data.values # Divide data into attributes and predictor X = array[:,", "'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']) # Read in data data =", "Course # Here I will be going through each part of the course", "Decision Tree) versus many of the same models (many Decision Trees in a", "By <NAME> # # Project will soon be found at: # # https://www.inertia7.com/projects/", "the one ensemble method I haven't tried: The Voting Classifier This method involves", "Here I will try out a bunch of different things and see where", "section to do some ensemble model training, as it represents an extra layer", "this, I will instead invoke the one ensemble method I haven't tried: The", "= 1)),], voting = 'hard')] # Number 2: Soft Vote (Argmax of sums", "import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection", "data data = read_csv(url, names = columns) array = data.values # Divide data", "fancy ''' # Make list for models models = np.empty([3, 2], dtype =", "= scoring) if scoring == 'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" %", "KFold, train_test_split # Define url and columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg',", "as Logsitic Regression + Decision Tree) versus many of the same models (many", "#################################### # Lesson 11: Improve 
Accuracy with Ensemble Methods #################################### ''' Here in", "scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X, y, cv =", "# https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # # Project will soon be found", "Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),],", "= np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']) # Read in", "Read in data data = read_csv(url, names = columns) array = data.values #", "'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean() * 100.0,", "1)),], voting = 'hard')] # Number 2: Soft Vote (Argmax of sums of", "classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state", "through models, then fit & evaluate for name, model in models: k_fold =", "for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X, y, cv", "nothing too fancy ''' # Make list for models models = np.empty([3, 2],", "a Random Forest or Gradient Boosted Machine) Here I will try out a", "in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X, y, cv = k_fold,", "Python Mini Course # Here I will be going through each part of", "= object) # Voting ensembles # Number 1: Hard Vote (Predicted class labels", "found at: # # https://www.inertia7.com/projects/ #################################### # Welcome to my repo for the", "(+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except AttributeError: print(\"The %s model cannot perform", "parts import numpy as np import pandas as pd from pandas import read_csv,", "= 'soft')] # Number 3: Soft Vote with weights # Some models will", "you can get a feel of the 
different parts import numpy as np", "instead invoke the one ensemble method I haven't tried: The Voting Classifier This", "['Voting Classifier 3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state =", "import numpy as np import pandas as pd from pandas import read_csv, Series", "= 1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X,", "== 'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean() *", "= ['Voting Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))],", "from pandas import read_csv, Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis", "= 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] # Number 2: Soft", "from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier", "Number 1: Hard Vote (Predicted class labels used for majority rule voting) models[0]", "it goes! 
Will use cross validation metrics here, nothing too fancy ''' #", "of %s model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except AttributeError: print(\"The", "Voting ensembles # Number 1: Hard Vote (Predicted class labels used for majority", "be more valuable than others models[2] = ['Voting Classifier 3', VotingClassifier(estimators = [", "The Voting Classifier This method involves literally combining different models (such as Logsitic", "Course # # Inspired by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # #", "# Welcome to my repo for the Mastering Machine Learning Python Mini Course", "%.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean() * 100.0, result.std() * 100.0)) else: print(\"\\n%s", "else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except", "= array[:, 8] #################################### # Lesson 11: Improve Accuracy with Ensemble Methods ####################################", "Learning Python Mini Course # Here I will be going through each part", "course would have been a section to do some ensemble model training, as", "different parts import numpy as np import pandas as pd from pandas import", "0.75))] # Iterate through models, then fit & evaluate for name, model in", "+ Decision Tree) versus many of the same models (many Decision Trees in", "name, result.mean(), result.std())) except AttributeError: print(\"The %s model cannot perform cross validation with", "as it represents an extra layer on top of traditional models But since", "been a section to do some ensemble model training, as it represents an", "# Here I will be going through each part of the course #", "with weights # Some models will be more valuable than others models[2] =", "= [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft',", "1', 
VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting", "Welcome to my repo for the Mastering Machine Learning Python Mini Course #", "do some ensemble model training, as it represents an extra layer on top", "KFold(n_splits = 10, random_state = 1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try:", "and predictor X = array[:, 0:8] y = array[:, 8] #################################### # Lesson", "8] #################################### # Lesson 11: Improve Accuracy with Ensemble Methods #################################### ''' Here", "'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']) # Read in data data", "= 'soft', weights = (0.25, 0.75))] # Iterate through models, then fit &", "result.std())) except AttributeError: print(\"The %s model cannot perform cross validation with the %s", "some ensemble model training, as it represents an extra layer on top of", "method I haven't tried: The Voting Classifier This method involves literally combining different", "traditional models But since I have already done this, I will instead invoke", "then fit & evaluate for name, model in models: k_fold = KFold(n_splits =", "Trees in a Random Forest or Gradient Boosted Machine) Here I will try", "('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X, y, cv = k_fold, scoring", "1)),], voting = 'soft', weights = (0.25, 0.75))] # Iterate through models, then", "I will instead invoke the one ensemble method I haven't tried: The Voting", "(Argmax of sums of predicted probabilities used) # Recommended for ensemble of well-calibrated", "(such as Logsitic Regression + Decision Tree) versus many of the same models", "LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] # Number 2:", "random_state = 1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model,", 
"literally combining different models (such as Logsitic Regression + Decision Tree) versus many", "my repo for the Mastering Machine Learning Python Mini Course # Here I", "# Read in data data = read_csv(url, names = columns) array = data.values", "print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except AttributeError:", "Mini Course # # Inspired by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q #", "top of traditional models But since I have already done this, I will", "Divide data into attributes and predictor X = array[:, 0:8] y = array[:,", "voting = 'hard')] # Number 2: Soft Vote (Argmax of sums of predicted", "= 1)),], voting = 'soft', weights = (0.25, 0.75))] # Iterate through models,", "# # https://www.inertia7.com/projects/ #################################### # Welcome to my repo for the Mastering Machine", "data.values # Divide data into attributes and predictor X = array[:, 0:8] y", "Voting Classifier This method involves literally combining different models (such as Logsitic Regression", "an extra layer on top of traditional models But since I have already", "majority rule voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state", "''' Here in the course would have been a section to do some", "bunch of different things and see where it goes! Will use cross validation", "Vote with weights # Some models will be more valuable than others models[2]", "different things and see where it goes! 
Will use cross validation metrics here,", "or Gradient Boosted Machine) Here I will try out a bunch of different", "columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']) # Read", "soon be found at: # # https://www.inertia7.com/projects/ #################################### # Welcome to my repo", "# So you can get a feel of the different parts import numpy", "2: Soft Vote (Argmax of sums of predicted probabilities used) # Recommended for", "to do some ensemble model training, as it represents an extra layer on", "VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting =", "Vote (Predicted class labels used for majority rule voting) models[0] = ['Voting Classifier", "# Number 3: Soft Vote with weights # Some models will be more", "('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] # Number 2: Soft Vote (Argmax", "'pedi', 'age', 'class']) # Read in data data = read_csv(url, names = columns)", "going through each part of the course # So you can get a", "than others models[2] = ['Voting Classifier 3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state =", "for the Mastering Machine Learning Python Mini Course # Here I will be", "'hard')] # Number 2: Soft Vote (Argmax of sums of predicted probabilities used)", "VotingClassifier from sklearn.model_selection import cross_val_score, KFold, train_test_split # Define url and columns url", "Make list for models models = np.empty([3, 2], dtype = object) # Voting", "1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X, y,", "= ['Voting Classifier 3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state", "y = array[:, 8] #################################### # Lesson 11: Improve Accuracy with Ensemble Methods", "%s model:\\n %.3f%% (+\\-%.3f%%)\" % 
(scoring, name, result.mean() * 100.0, result.std() * 100.0))", "# # Project will soon be found at: # # https://www.inertia7.com/projects/ #################################### #", "%s model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except AttributeError: print(\"The %s", "# Make list for models models = np.empty([3, 2], dtype = object) #", "on top of traditional models But since I have already done this, I", "of predicted probabilities used) # Recommended for ensemble of well-calibrated classifiers models[1] =", "tried: The Voting Classifier This method involves literally combining different models (such as", "voting = 'soft', weights = (0.25, 0.75))] # Iterate through models, then fit", "# Mastering ML Python Mini Course # # Inspired by the project here:", "# # By <NAME> # # Project will soon be found at: #", "python3 ################################### # Mastering ML Python Mini Course # # Inspired by the", "array[:, 8] #################################### # Lesson 11: Improve Accuracy with Ensemble Methods #################################### '''", "# Inspired by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME>", "'age', 'class']) # Read in data data = read_csv(url, names = columns) array", "GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights = (0.25, 0.75))] # Iterate through", "# Number 2: Soft Vote (Argmax of sums of predicted probabilities used) #", "= (0.25, 0.75))] # Iterate through models, then fit & evaluate for name,", "involves literally combining different models (such as Logsitic Regression + Decision Tree) versus", "read_csv, Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import", "= columns) array = data.values # Divide data into attributes and predictor X", "& evaluate for name, model in models: 
k_fold = KFold(n_splits = 10, random_state", "models[2] = ['Voting Classifier 3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm',", "of the course # So you can get a feel of the different", "of traditional models But since I have already done this, I will instead", "predictor X = array[:, 0:8] y = array[:, 8] #################################### # Lesson 11:", "X = array[:, 0:8] y = array[:, 8] #################################### # Lesson 11: Improve", "y, cv = k_fold, scoring = scoring) if scoring == 'accuracy': print(\"\\n%s of", "1: Hard Vote (Predicted class labels used for majority rule voting) models[0] =", "get a feel of the different parts import numpy as np import pandas", "more valuable than others models[2] = ['Voting Classifier 3', VotingClassifier(estimators = [ ('lr',", "at: # # https://www.inertia7.com/projects/ #################################### # Welcome to my repo for the Mastering", "see where it goes! Will use cross validation metrics here, nothing too fancy", "Number 3: Soft Vote with weights # Some models will be more valuable", "in the course would have been a section to do some ensemble model", "10, random_state = 1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'): try: result =", "and see where it goes! 
Will use cross validation metrics here, nothing too", "Tree) versus many of the same models (many Decision Trees in a Random", "columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi',", "['Voting Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state =", "3: Soft Vote with weights # Some models will be more valuable than", "I have already done this, I will instead invoke the one ensemble method", "Define url and columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin',", "'skin', 'test', 'mass', 'pedi', 'age', 'class']) # Read in data data = read_csv(url,", "things and see where it goes! Will use cross validation metrics here, nothing", "= 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights = (0.25, 0.75))]", "import read_csv, Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble", "repo for the Mastering Machine Learning Python Mini Course # Here I will", "the course would have been a section to do some ensemble model training,", "the same models (many Decision Trees in a Random Forest or Gradient Boosted", "Here in the course would have been a section to do some ensemble", "Hard Vote (Predicted class labels used for majority rule voting) models[0] = ['Voting", "name, model in models: k_fold = KFold(n_splits = 10, random_state = 1) for", "result = cross_val_score(model, X, y, cv = k_fold, scoring = scoring) if scoring", "feel of the different parts import numpy as np import pandas as pd", "sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score, KFold,", "import cross_val_score, KFold, train_test_split # Define url and columns url 
= 'https://goo.gl/bDdBiA' columns", "result.mean() * 100.0, result.std() * 100.0)) else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\"", "used) # Recommended for ensemble of well-calibrated classifiers models[1] = ['Voting Classifier 2',", "model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean() * 100.0, result.std() * 100.0)) else:", "of the different parts import numpy as np import pandas as pd from", "'roc_auc', 'neg_log_loss'): try: result = cross_val_score(model, X, y, cv = k_fold, scoring =", "Vote (Argmax of sums of predicted probabilities used) # Recommended for ensemble of", "sklearn.model_selection import cross_val_score, KFold, train_test_split # Define url and columns url = 'https://goo.gl/bDdBiA'", "'test', 'mass', 'pedi', 'age', 'class']) # Read in data data = read_csv(url, names", "k_fold = KFold(n_splits = 10, random_state = 1) for scoring in ('accuracy', 'roc_auc',", "sums of predicted probabilities used) # Recommended for ensemble of well-calibrated classifiers models[1]", "print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean() * 100.0, result.std()", "result.std() * 100.0)) else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" % (scoring, name,", "k_fold, scoring = scoring) if scoring == 'accuracy': print(\"\\n%s of %s model:\\n %.3f%%", "used for majority rule voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [", "# Voting ensembles # Number 1: Hard Vote (Predicted class labels used for", "I will try out a bunch of different things and see where it", "models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state =", "of the same models (many Decision Trees in a Random Forest or Gradient", "VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting = 'soft')] #", "1))], voting = 'soft')] # Number 3: Soft Vote with weights # Some", 
"invoke the one ensemble method I haven't tried: The Voting Classifier This method", "predicted probabilities used) # Recommended for ensemble of well-calibrated classifiers models[1] = ['Voting", "LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights = (0.25,", "from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score, KFold, train_test_split # Define", "valuable than others models[2] = ['Voting Classifier 3', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state", "will instead invoke the one ensemble method I haven't tried: The Voting Classifier", "too fancy ''' # Make list for models models = np.empty([3, 2], dtype", "done this, I will instead invoke the one ensemble method I haven't tried:", "= data.values # Divide data into attributes and predictor X = array[:, 0:8]", "attributes and predictor X = array[:, 0:8] y = array[:, 8] #################################### #", "many of the same models (many Decision Trees in a Random Forest or", "Forest or Gradient Boosted Machine) Here I will try out a bunch of", "%.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std())) except AttributeError: print(\"The %s model cannot", "from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score,", "Gradient Boosted Machine) Here I will try out a bunch of different things", "cross_val_score, KFold, train_test_split # Define url and columns url = 'https://goo.gl/bDdBiA' columns =", "Recommended for ensemble of well-calibrated classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators =", "sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier 
from", "Accuracy with Ensemble Methods #################################### ''' Here in the course would have been", "Some models will be more valuable than others models[2] = ['Voting Classifier 3',", "'class']) # Read in data data = read_csv(url, names = columns) array =", "= read_csv(url, names = columns) array = data.values # Divide data into attributes", "extra layer on top of traditional models But since I have already done", "2], dtype = object) # Voting ensembles # Number 1: Hard Vote (Predicted", "#!/usr/bin/env python3 ################################### # Mastering ML Python Mini Course # # Inspired by", "it represents an extra layer on top of traditional models But since I", "= k_fold, scoring = scoring) if scoring == 'accuracy': print(\"\\n%s of %s model:\\n", "# Define url and columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres',", "model training, as it represents an extra layer on top of traditional models", "# Number 1: Hard Vote (Predicted class labels used for majority rule voting)", "as pd from pandas import read_csv, Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis", "LogisticRegression(random_state = 1))], voting = 'soft')] # Number 3: Soft Vote with weights", "validation metrics here, nothing too fancy ''' # Make list for models models", "Inspired by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> #", "np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']) # Read in data", "Methods #################################### ''' Here in the course would have been a section to", "where it goes! 
Will use cross validation metrics here, nothing too fancy '''", "have been a section to do some ensemble model training, as it represents", "will soon be found at: # # https://www.inertia7.com/projects/ #################################### # Welcome to my", "array[:, 0:8] y = array[:, 8] #################################### # Lesson 11: Improve Accuracy with", "try out a bunch of different things and see where it goes! Will", "scoring) if scoring == 'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring,", "pandas as pd from pandas import read_csv, Series from sklearn.linear_model import LogisticRegression from", "= array[:, 0:8] y = array[:, 8] #################################### # Lesson 11: Improve Accuracy", "(Predicted class labels used for majority rule voting) models[0] = ['Voting Classifier 1',", "Regression + Decision Tree) versus many of the same models (many Decision Trees", "already done this, I will instead invoke the one ensemble method I haven't", "1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights = (0.25, 0.75))] #", "data into attributes and predictor X = array[:, 0:8] y = array[:, 8]", "models (such as Logsitic Regression + Decision Tree) versus many of the same", "(0.25, 0.75))] # Iterate through models, then fit & evaluate for name, model", "data = read_csv(url, names = columns) array = data.values # Divide data into", "11: Improve Accuracy with Ensemble Methods #################################### ''' Here in the course would", "('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights = (0.25, 0.75))] # Iterate", "Here I will be going through each part of the course # So", "% (scoring, name, result.mean(), result.std())) except AttributeError: print(\"The %s model cannot perform cross", "url and columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin', 'test',", "= np.empty([3, 2], dtype = object) # Voting ensembles # Number 1: 
Hard", "= ['Voting Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state", "= [ ('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')]", "into attributes and predictor X = array[:, 0:8] y = array[:, 8] ####################################", "Project will soon be found at: # # https://www.inertia7.com/projects/ #################################### # Welcome to", "cross validation metrics here, nothing too fancy ''' # Make list for models", "can get a feel of the different parts import numpy as np import", "labels used for majority rule voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators =", "So you can get a feel of the different parts import numpy as", "a feel of the different parts import numpy as np import pandas as", "100.0)) else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(), result.std()))", "through each part of the course # So you can get a feel", "Number 2: Soft Vote (Argmax of sums of predicted probabilities used) # Recommended", "class labels used for majority rule voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators", "% (scoring, name, result.mean() * 100.0, result.std() * 100.0)) else: print(\"\\n%s of %s", "GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] # Number 2: Soft Vote (Argmax of", "fit & evaluate for name, model in models: k_fold = KFold(n_splits = 10,", "X, y, cv = k_fold, scoring = scoring) if scoring == 'accuracy': print(\"\\n%s", "# # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # # Project will soon be", "%s model cannot perform cross validation with the %s metric\" % (name, scoring))", "versus many of the same models (many Decision Trees in a Random Forest", "each part of the course # So you can get a feel of", 
"well-calibrated classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr',", "part of the course # So you can get a feel of the", "weights = (0.25, 0.75))] # Iterate through models, then fit & evaluate for", "a section to do some ensemble model training, as it represents an extra", "url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age',", "[ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting = 'soft')] # Number 3:", "will be more valuable than others models[2] = ['Voting Classifier 3', VotingClassifier(estimators =", "(scoring, name, result.mean() * 100.0, result.std() * 100.0)) else: print(\"\\n%s of %s model:\\n", "list for models models = np.empty([3, 2], dtype = object) # Voting ensembles", "since I have already done this, I will instead invoke the one ensemble", "scoring == 'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\" % (scoring, name, result.mean()", "LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting = 'soft')] # Number 3: Soft Vote", "('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] # Number", "model in models: k_fold = KFold(n_splits = 10, random_state = 1) for scoring", "<NAME> # # Project will soon be found at: # # https://www.inertia7.com/projects/ ####################################", "100.0, result.std() * 100.0)) else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" % (scoring,", "train_test_split # Define url and columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas',", "be going through each part of the course # So you can get", "weights # Some models will be more valuable than others models[2] = ['Voting", "layer on top of traditional models But since I have already done this,", "others models[2] = ['Voting Classifier 3', 
VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)),", "be found at: # # https://www.inertia7.com/projects/ #################################### # Welcome to my repo for", "Will use cross validation metrics here, nothing too fancy ''' # Make list", "in data data = read_csv(url, names = columns) array = data.values # Divide", "This method involves literally combining different models (such as Logsitic Regression + Decision", "models = np.empty([3, 2], dtype = object) # Voting ensembles # Number 1:", "'neg_log_loss'): try: result = cross_val_score(model, X, y, cv = k_fold, scoring = scoring)", "scoring = scoring) if scoring == 'accuracy': print(\"\\n%s of %s model:\\n %.3f%% (+\\-%.3f%%)\"", "GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score, KFold, train_test_split # Define url and columns", "#################################### # Welcome to my repo for the Mastering Machine Learning Python Mini", "method involves literally combining different models (such as Logsitic Regression + Decision Tree)", "would have been a section to do some ensemble model training, as it", "'soft', weights = (0.25, 0.75))] # Iterate through models, then fit & evaluate", "of different things and see where it goes! 
Will use cross validation metrics", "Classifier This method involves literally combining different models (such as Logsitic Regression +", "voting = 'soft')] # Number 3: Soft Vote with weights # Some models", "will be going through each part of the course # So you can", "ML Python Mini Course # # Inspired by the project here: # #", "= KFold(n_splits = 10, random_state = 1) for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'):", "= 'hard')] # Number 2: Soft Vote (Argmax of sums of predicted probabilities", "1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'hard')] # Number 2: Soft Vote", "name, result.mean() * 100.0, result.std() * 100.0)) else: print(\"\\n%s of %s model:\\n %.3f", "* 100.0)) else: print(\"\\n%s of %s model:\\n %.3f (+\\-%.3f)\" % (scoring, name, result.mean(),", "by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # #", "But since I have already done this, I will instead invoke the one", "different models (such as Logsitic Regression + Decision Tree) versus many of the", "Lesson 11: Improve Accuracy with Ensemble Methods #################################### ''' Here in the course", "models, then fit & evaluate for name, model in models: k_fold = KFold(n_splits", "Python Mini Course # # Inspired by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q", "import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score, KFold, train_test_split # Define url and", "= 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class'])", "Decision Trees in a Random Forest or Gradient Boosted Machine) Here I will", "Machine Learning Python Mini Course # Here I will be going through each", "models But since I have already done this, I will instead invoke the", "object) 
# Voting ensembles # Number 1: Hard Vote (Predicted class labels used", "for majority rule voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [ ('lr',", "# Divide data into attributes and predictor X = array[:, 0:8] y =", "metrics here, nothing too fancy ''' # Make list for models models =", "(scoring, name, result.mean(), result.std())) except AttributeError: print(\"The %s model cannot perform cross validation", "as np import pandas as pd from pandas import read_csv, Series from sklearn.linear_model", "(many Decision Trees in a Random Forest or Gradient Boosted Machine) Here I", "of sums of predicted probabilities used) # Recommended for ensemble of well-calibrated classifiers", "#################################### ''' Here in the course would have been a section to do", "Mastering ML Python Mini Course # # Inspired by the project here: #", "ensembles # Number 1: Hard Vote (Predicted class labels used for majority rule", "ensemble model training, as it represents an extra layer on top of traditional", "haven't tried: The Voting Classifier This method involves literally combining different models (such", "LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import cross_val_score, KFold, train_test_split #", "# # Inspired by the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By", "Ensemble Methods #################################### ''' Here in the course would have been a section", "try: result = cross_val_score(model, X, y, cv = k_fold, scoring = scoring) if", "I haven't tried: The Voting Classifier This method involves literally combining different models", "= cross_val_score(model, X, y, cv = k_fold, scoring = scoring) if scoring ==", "the different parts import numpy as np import pandas as pd from pandas", "training, as it represents an extra layer on top of traditional 
models But", "# Some models will be more valuable than others models[2] = ['Voting Classifier", "of well-calibrated classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()),", "in models: k_fold = KFold(n_splits = 10, random_state = 1) for scoring in", "cross_val_score(model, X, y, cv = k_fold, scoring = scoring) if scoring == 'accuracy':", "pd from pandas import read_csv, Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import", "np import pandas as pd from pandas import read_csv, Series from sklearn.linear_model import", "('lr', LogisticRegression(random_state = 1)), ('gbm', GradientBoostingClassifier(random_state = 1)),], voting = 'soft', weights =", "a bunch of different things and see where it goes! Will use cross", "Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier,", "Iterate through models, then fit & evaluate for name, model in models: k_fold", "combining different models (such as Logsitic Regression + Decision Tree) versus many of", "out a bunch of different things and see where it goes! 
Will use", "voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)),", "have already done this, I will instead invoke the one ensemble method I", "LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier from sklearn.model_selection import", "Machine) Here I will try out a bunch of different things and see", "# Recommended for ensemble of well-calibrated classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators", "pandas import read_csv, Series from sklearn.linear_model import LogisticRegression from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from", "will try out a bunch of different things and see where it goes!", "the project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # # Project", "Classifier 2', VotingClassifier(estimators = [ ('lda', LinearDiscriminantAnalysis()), ('lr', LogisticRegression(random_state = 1))], voting =", "models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state = 1)), ('gbm',", "Random Forest or Gradient Boosted Machine) Here I will try out a bunch", "numpy as np import pandas as pd from pandas import read_csv, Series from", "# Iterate through models, then fit & evaluate for name, model in models:", "import pandas as pd from pandas import read_csv, Series from sklearn.linear_model import LogisticRegression", "and columns url = 'https://goo.gl/bDdBiA' columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass',", "one ensemble method I haven't tried: The Voting Classifier This method involves literally", "here, nothing too fancy ''' # Make list for models models = np.empty([3,", "################################### # Mastering ML Python Mini Course # # Inspired by the project", "ensemble of 
well-calibrated classifiers models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [ ('lda',", "result.mean(), result.std())) except AttributeError: print(\"The %s model cannot perform cross validation with the", "represents an extra layer on top of traditional models But since I have", "project here: # # https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q # # By <NAME> # # Project will", "models (many Decision Trees in a Random Forest or Gradient Boosted Machine) Here", "names = columns) array = data.values # Divide data into attributes and predictor", "rule voting) models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [ ('lr', LogisticRegression(random_state =" ]
[ "= load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'),", "return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable:", "None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d = local() if", "% stat_id, \"code\", \"100\", config.get('lang')) tables = [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'):", "= parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table =", "service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False,", "= load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'),", "None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service", "\"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege:", "\"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table =", "d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d =", "t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return []", "return None return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, 
to_year=2100): table = load_cached(table_id=table_id,", "load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'),", "= load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table: service = get_download_service().service", "get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table')", "format, False, transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "table == EMPTY: return None return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100):", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1])", "table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return None return", "_type='feature') if not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name,", "= Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d = local() if d.__dict__.get('export') is", "import config from mongo import cache_result, load_cached, EMPTY from util import serialize_soap def", "= local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id):", "format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not", "from mongo import cache_result, load_cached, EMPTY 
from util import serialize_soap def get_test_service(): d", "get_download_service(): d = local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return", "config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic,", "\"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif", "wf table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table", "to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table: service", "def get_download_service(): d = local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True)", "config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "to_year=2100, _type='table') if not table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'),", "d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def", "find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service data =", "import * from suds import * import config from mongo import cache_result, load_cached,", "_type='table_meta') if not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\",", "cache_result(table, 
table_id=table_id, _type='table_meta') elif table == EMPTY: return None return table def get_table(table_id,", "statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'),", "cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return None", "= service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\",", "service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = []", "d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not", "d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service'))", "for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return", "var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service variable", "for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None cache_result(variable,", "tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\",", "attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable == EMPTY:", "#table = 
service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False, transposed, str(from_year), str(to_year), #", "d.__dict__.get('download') def get_export_service(): d = local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service'))", "config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\",", "get_test_service(): d = local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test')", "retxml=True) return d.__dict__.get('download') def get_export_service(): d = local() if d.__dict__.get('export') is None: d.__dict__['export']", "table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf: #print wf", "\"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1]", "serialize_soap def get_test_service(): d = local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service'))", "if not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id,", "d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id,", "elif table == EMPTY: return None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id,", "\"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except", "[] #print data if 
hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t))", "service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print data", "def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service data", "= service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables = [] #print", "data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic", "import cache_result, load_cached, EMPTY from util import serialize_soap def get_test_service(): d = local()", "import local from suds.client import * from suds import * import config from", "get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service data =", "_type='table') elif table == EMPTY: return None return table def find_tables_by_statistic(stat_id): tables =", "= service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic =", "serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY: return None return table def", "return None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table:", "tables = [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in", "= get_recherche_service().service data = 
service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables", "#print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables,", "str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\",", "False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1'))", "stat_id=stat_id, _type='table_list') elif tables == EMPTY: return [] return tables def get_variable(var_value, var_name='code'):", "is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local() if", "format=format, from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return None return table def", "config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault,", "from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table:", "\"*\", False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False,", "\"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\",", "transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table: service = get_download_service().service try: #table", "elif table == EMPTY: return None return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800,", "csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = 
unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table", "str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False,", "d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local()", "get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege:", "hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif", "if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list')", "get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if", "is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local() if", "tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'),", "variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), )", "= [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable =", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", # 
config.get('lang')) table =", "Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic: service", "[] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None", "== EMPTY: return None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if", "not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\",", "= table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf: #print", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv", "config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes']", "d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local()", "EMPTY: return None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not", "from suds import * import config from mongo import cache_result, load_cached, EMPTY from", "return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic: service =", "service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable =", "variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if 
data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e", "None return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False,", "EMPTY from util import serialize_soap def get_test_service(): d = local() if d.__dict__.get('test') is", "table = unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table = None cache_result(table, table_id=table_id,", "not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang'))", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", # config.get('lang')) table", "statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service", "* from suds import * import config from mongo import cache_result, load_cached, EMPTY", "cache_result, load_cached, EMPTY from util import serialize_soap def get_test_service(): d = local() if", "def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service =", "local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d", "str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang'))", "= unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table = None cache_result(table, table_id=table_id, transposed=False,", "table_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id,", "1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table = None cache_result(table,", 
"get_recherche_service(): d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche')", "var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data =", "\"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return [] return", "threading import local from suds.client import * from suds import * import config", "tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service", "* import config from mongo import cache_result, load_cached, EMPTY from util import serialize_soap", "transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "not table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", #", "#print wf table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif", "config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\",", "\"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv =", "None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable == EMPTY: return None return variable", "= get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print 
data if", "local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d", "elif statistic == EMPTY: return None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id,", "config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables = [] #print data if", "\"100\", config.get('lang')) tables = [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for", "load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table: service = get_download_service().service try:", "_type='table_list') if not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" %", "\"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif", "if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic =", "mongo import cache_result, load_cached, EMPTY from util import serialize_soap def get_test_service(): d =", "else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY:", "is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d = local()", "stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return None return statistic def get_table_meta(table_id): table", "data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, 
_type='statistic') elif statistic == EMPTY: return None", "serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return None return statistic def", "import serialize_soap def get_test_service(): d = local() if d.__dict__.get('test') is None: d.__dict__['test'] =", "[] return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not", "get_export_service(): d = local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export')", "def get_test_service(): d = local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return", "= service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False, transposed, str(from_year), str(to_year), # \"*\",", "local from suds.client import * from suds import * import config from mongo", "else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable == EMPTY: return", "data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table", "d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d = local() if d.__dict__.get('export')", "= Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche') is None:", "EMPTY: return None return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table =", "config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for", "#print data if 
data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table ==", "_type='table_meta') elif table == EMPTY: return None return table def get_table(table_id, transposed=False, format=\"csv\",", "== EMPTY: return None return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table", "return table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format,", "= [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege:", "local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service():", "def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100,", "d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche') is", "if not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\",", "Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche']", "if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return", "= get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False, 
transposed,", "is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic')", "variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service variable =", "= Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic:", "if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes", "attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif", "return d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] =", "\"*\", \"*\", \"*\", \"*\", False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id,", "table_id, \"All\", # format, False, transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\",", "= serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return None return statistic", "= service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes =", "if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d", "tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return [] 
return tables def", "data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY:", "get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False, transposed, str(from_year),", "# \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", #", "= None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable == EMPTY: return None return", "table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\",", "None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche')", "def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic: service = get_recherche_service().service data", "parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf:", "= serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if", "\"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'),", "False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year),", "'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables", 
"from suds.client import * from suds import * import config from mongo import", "data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY: return None", "return [] return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if", "data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return [] return tables", "= local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def", "if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY: return", "= serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY: return None return table", ") if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\",", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts", "= service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable", "table == EMPTY: return None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list')", "data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id,", "table = 
load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'),", "EMPTY: return None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not", "== EMPTY: return None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if", "#print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic ==", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", # config.get('lang'))", "not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang'))", ") if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] =", "str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\",", "data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes", "None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download')", "service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang'))", "EMPTY: return [] return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature')", "= get_recherche_service().service variable = 
service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), )", "= Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download') is None:", "format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service data", "config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table,", "transposed=False, format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if", "return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table: service =", "None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if", "Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download') is None: d.__dict__['download']", "service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data", "var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data", "unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table = None cache_result(table, table_id=table_id, transposed=False, format=format,", "if variable.objektKatalogEintraege: variable = 
serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\",", "\"*\", \"*\", \"*\", False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\",", "data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables = []", "load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id,", "table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", config.get('lang')) parts =", "\"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta')", "service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False, transposed, str(from_year), str(to_year), # \"*\", \"*\",", "table def get_table(table_id, transposed=False, format=\"csv\", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800,", "\"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year),", "False, transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY:", "variable = None cache_result(variable, 
var_value=var_value, var_name=var_name, _type='feature') elif variable == EMPTY: return None", "\"*\", \"*\", False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format,", "= attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable ==", "= load_cached(stat_id=stat_id, _type='statistic') if not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'),", "table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table: service =", "get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service", "None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return", "load_cached(stat_id=stat_id, _type='statistic') if not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id,", "statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return None return", "from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return None return table def find_tables_by_statistic(stat_id):", "to_year=2100, _type='table') elif table == EMPTY: return None return table def find_tables_by_statistic(stat_id): tables", "cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return None return statistic def get_table_meta(table_id):", "util import serialize_soap def get_test_service(): d = local() if d.__dict__.get('test') is None: d.__dict__['test']", "return d.__dict__.get('download') def 
get_export_service(): d = local() if d.__dict__.get('export') is None: d.__dict__['export'] =", "None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table: service", "return None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables:", "not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\",", "table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print", "return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service =", "attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable", "from util import serialize_soap def get_test_service(): d = local() if d.__dict__.get('test') is None:", "try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format, False, transposed, str(from_year), str(to_year),", "\"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e))", "if not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\",", "\"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'),", "d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not 
statistic: service = get_recherche_service().service", "\"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'),", "serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege:", "service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0])", "from threading import local from suds.client import * from suds import * import", "d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'),", "= get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data if", "suds.client import * from suds import * import config from mongo import cache_result,", "variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege:", "d = local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def", "local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic", "service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'),", 
"config.get('password'), table_id, \"All\", # format, False, transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\",", "tables == EMPTY: return [] return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value,", "table_id=table_id, _type='table_meta') elif table == EMPTY: return None return table def get_table(table_id, transposed=False,", "d = local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download')", "_type='table_list') elif tables == EMPTY: return [] return tables def get_variable(var_value, var_name='code'): variable", "_type='statistic') elif statistic == EMPTY: return None return statistic def get_table_meta(table_id): table =", "table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table ==", "\"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False, \"\", # config.get('lang')) table = service.TabellenDownload(config.get('user'),", "# config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\",", "format=format, from_year=1800, to_year=2100, _type='table') if not table: service = get_download_service().service try: #table =", "load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\"", "load_cached, EMPTY from util import serialize_soap def get_test_service(): d = local() if d.__dict__.get('test')", "if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d =", "config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) 
cache_result(table, table_id=table_id, _type='table_meta') elif table", "False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", False,", "except WebFault, wf: #print wf table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800,", "stat_id, \"code\", \"100\", config.get('lang')) tables = [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege))", "def get_recherche_service(): d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return", "if not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\",", "import * import config from mongo import cache_result, load_cached, EMPTY from util import", "from_year=1800, to_year=2100, _type='table') if not table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'),", "statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print", "\"All\", # format, False, transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\",", "variable['attributes'] = attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable", "statistic == EMPTY: return None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta')", "== EMPTY: return [] return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, var_name=var_name,", "table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY: return None return", 
"Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d = local() if d.__dict__.get('export') is None:", "# format, False, transposed, str(from_year), str(to_year), # \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "\"*\", \"*\", False, \"\", config.get('lang')) parts = table.split(table.split(\"\\r\\n\")[1]) csv = parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table", "config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0])", "service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0])", "config.get('lang')) tables = [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t", "wf: #print wf table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table')", "config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic", "variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), \"*\", \"\", \"100\", config.get('lang'),", "transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return None return table", "parts[2].split(\"\\r\\n\\r\\n\", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table = None", "= None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table == 
EMPTY:", "get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables =", "e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None cache_result(variable, var_value=var_value,", "= load_cached(var_value=var_value, var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'),", "if not table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\",", "config from mongo import cache_result, load_cached, EMPTY from util import serialize_soap def get_test_service():", "get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege:", "\"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic')", "elif tables == EMPTY: return [] return tables def get_variable(var_value, var_name='code'): variable =", "def get_export_service(): d = local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return", "var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value,", "variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, \"*\", \"Alle\", \"100\",", "= local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def 
get_recherche_service():", "= local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service():", "d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download') is", "stat_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id,", "in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name,", "tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables ==", "cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return [] return tables def get_variable(var_value,", "\"*\", \"Alle\", \"100\", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'),", "data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature')", "return d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download') is None: d.__dict__['download'] =", "d = local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def", "WebFault, wf: #print wf table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100,", "\"*\", 
\"\", \"100\", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in", "service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\",", "_type='statistic') if not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, \"\",", "service.TabellenKatalog(config.get('user'), config.get('password'), \"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables = [] #print data", "if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d =", "data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else:", "_type='table') if not table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id,", "suds import * import config from mongo import cache_result, load_cached, EMPTY from util", "\"%s-*\" % stat_id, \"code\", \"100\", config.get('lang')) tables = [] #print data if hasattr(data.objektKatalogEintraege,", "table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, \"All\", # format,", "= service.TabellenKatalog(config.get('user'), config.get('password'), table_id, \"\", \"100\", config.get('lang')) #print data if data.objektKatalogEintraege: table =", "get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic: service = get_recherche_service().service data =", "table_id, \"Alle\", format, False, str(from_year), str(to_year), \"*\", \"*\", \"*\", \"*\", \"*\", \"*\", \"*\",", "\"code\", \"100\", config.get('lang')) tables = [] 
#print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else:" ]
[ "< 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit =", "from pylimit import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit", "PyRateLimit from pylimit import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self):", "test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit", "self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x", "'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0,", "unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt,", "limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for", "test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 10): self.assertTrue(limit.attempt('test_namespace2'))", "redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 10): self.assertTrue(limit.attempt('test_namespace2')) self.assertTrue(limit.is_rate_limited('test_namespace2')) time.sleep(10)", "def test_peek(self): 
PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 10):", "x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit", "def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 20):", "time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\",", "pylimit import PyRateLimit from pylimit import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase):", "= PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10,", "def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379)", "self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10)", "class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self):", "redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5) if x", 
"self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x", "for x in range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace'))", "from pylimit import PyRateLimit from pylimit import PyRateLimitException import unittest import time class", "range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def", "in range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace'))", "limit = PyRateLimit(period=10, limit=10) for x in range(0, 10): self.assertTrue(limit.attempt('test_namespace2')) self.assertTrue(limit.is_rate_limited('test_namespace2')) time.sleep(10) self.assertFalse(limit.is_rate_limited('test_namespace2'))", "import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace')", "limit=10) for x in range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else:", "time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in", "= PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5) if x < 10:", "else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = 
PyRateLimit(period=10, limit=10) for", "import PyRateLimit from pylimit import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def", "PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 10): self.assertTrue(limit.attempt('test_namespace2')) self.assertTrue(limit.is_rate_limited('test_namespace2'))", "import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10,", "TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\",", "PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace'))", "pylimit import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit =", "PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10)", "time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def", "10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10,", "if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): 
PyRateLimit.init(redis_host=\"localhost\", redis_port=6379)", "self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0,", "x in range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6)", "limit = PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5) if x <", "test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5)", "PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10)", "limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit =", "20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self):", "import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException,", "PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5) if", "limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host=\"localhost\", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in", "<gh_stars>10-100 from pylimit import PyRateLimit from pylimit import PyRateLimitException import unittest import time" ]
[ "PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action", "return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), )", "score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score +=", "from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url =", "action_url = super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url += '#tab-promote' return", "40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description)", "self.seo_title = seo_title self.search_description = search_description @property def title(self): return self.seo_title or self.page_title", "if action == 'edit': action_url += '#tab-promote' return action_url class SeoHelper: def __init__(self,", "if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description)", "return self.search_description or '' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property", "( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property", ">= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') 
<= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ):", "@property def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH')", "action_url += '#tab-promote' return action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title", "= self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40", "django.template.defaultfilters import truncatechars from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting", "self.search_description = search_description @property def title(self): return self.seo_title or self.page_title @property def description(self):", "self.page_title = page_title self.seo_title = seo_title self.search_description = search_description @property def title(self): return", "self.score < 35: return '😢' elif self.score > 65: return '😄' return '😏'", "get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs)", "( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split()", "'edit': action_url += '#tab-promote' return action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None):", "return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0 if (", "if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') 
<= len(self.description) <=", "*args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url +=", "): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if (", "self.page_title @property def description(self): return self.search_description or '' @property def truncated_title(self): return truncatechars(", "AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url", "class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if", "score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score +=", "description(self): return self.search_description or '' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), )", "get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'):", "score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score", "0: return '😱' elif self.score < 35: return '😢' elif self.score > 65:", "= 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10", "get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title)", "def icon(self): if self.score == 0: return '😱' elif self.score < 35: 
return", "import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action,", "or '' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self):", "truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property", "get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property def", "return self.seo_title or self.page_title @property def description(self): return self.search_description or '' @property def", "<= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property def icon(self): if self.score", "): score += 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <=", "<filename>src/wagtail_marketing/helpers.py from django.template.defaultfilters import truncatechars from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf", "score += 25 return score @property def icon(self): if self.score == 0: return", "search_description @property def title(self): return self.seo_title or self.page_title @property def description(self): return self.search_description", "action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title =", "<= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property def icon(self):", "len(self.description) <= 
get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property def icon(self): if", "truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0 if", "import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args,", "if self.score == 0: return '😱' elif self.score < 35: return '😢' elif", "action == 'edit': action_url += '#tab-promote' return action_url class SeoHelper: def __init__(self, page_title,", "if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score", "self.search_description or '' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def", "truncatechars from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper):", "wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self,", "<= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <=", "get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if", "self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score 
+= 40 if", "= super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url += '#tab-promote' return action_url", "get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self):", "<= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score", "10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score", ") @property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score", "truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH')", "25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return", "@property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars(", "<= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split() if (", "def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0", "get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property def icon(self): if self.score ==", "seo_title=None, search_description=None): self.page_title = page_title self.seo_title = 
seo_title self.search_description = search_description @property def", "page_title self.seo_title = seo_title self.search_description = search_description @property def title(self): return self.seo_title or", "action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url", "super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url += '#tab-promote' return action_url class", "return action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title", "def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title = seo_title self.search_description =", "get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count)", "as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs):", "def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action ==", "@property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score =", "import truncatechars from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class", "<= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25", "icon(self): if self.score == 0: return '😱' elif self.score < 35: return '😢'", "from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as 
AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def", "return score @property def icon(self): if self.score == 0: return '😱' elif self.score", "+= 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ):", "): score += 25 return score @property def icon(self): if self.score == 0:", "seo_title self.search_description = search_description @property def title(self): return self.seo_title or self.page_title @property def", "def title(self): return self.seo_title or self.page_title @property def description(self): return self.search_description or ''", "**kwargs) if action == 'edit': action_url += '#tab-promote' return action_url class SeoHelper: def", "PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args,", "0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count", "== 'edit': action_url += '#tab-promote' return action_url class SeoHelper: def __init__(self, page_title, seo_title=None,", "elif self.score < 35: return '😢' elif self.score > 65: return '😄' return", "title(self): return self.seo_title or self.page_title @property def description(self): return self.search_description or '' @property", "= page_title self.seo_title = seo_title self.search_description = search_description @property def title(self): return self.seo_title", "**kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url += '#tab-promote'", "len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( 
get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH')", "score += 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT')", "from django.template.defaultfilters import truncatechars from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import", "@property def title(self): return self.seo_title or self.page_title @property def description(self): return self.search_description or", "'#tab-promote' return action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title", "@property def description(self): return self.search_description or '' @property def truncated_title(self): return truncatechars( self.title,", "search_description=None): self.page_title = page_title self.seo_title = seo_title self.search_description = search_description @property def title(self):", "len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT')", "self.score == 0: return '😱' elif self.score < 35: return '😢' elif self.score", "len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score +=", "25 return score @property def icon(self): if self.score == 0: return '😱' elif", "if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count =", "self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description, 
get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def", "score @property def icon(self): if self.score == 0: return '😱' elif self.score <", "get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score", "self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <=", "+= 25 return score @property def icon(self): if self.score == 0: return '😱'", "SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title = seo_title self.search_description", "+= 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25", ") @property def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <=", "__init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title = seo_title self.search_description = search_description", "truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'),", "get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action == 'edit':", "or self.page_title @property def description(self): return self.search_description or '' @property def truncated_title(self): return", "'' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return", "+= '#tab-promote' return 
action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title =", "wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action,", "return '😱' elif self.score < 35: return '😢' elif self.score > 65: return", "@property def icon(self): if self.score == 0: return '😱' elif self.score < 35:", "def description(self): return self.search_description or '' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'),", "== 0: return '😱' elif self.score < 35: return '😢' elif self.score >", "title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score +=", "( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >=", "= search_description @property def title(self): return self.seo_title or self.page_title @property def description(self): return", "score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH')", "= seo_title self.search_description = search_description @property def title(self): return self.seo_title or self.page_title @property", "class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title = seo_title", "+= 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <=", "get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= 
get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split() if", "page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title = seo_title self.search_description = search_description @property", "def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description,", "def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ):", "'😱' elif self.score < 35: return '😢' elif self.score > 65: return '😄'", "*args, **kwargs) if action == 'edit': action_url += '#tab-promote' return action_url class SeoHelper:", "self.seo_title or self.page_title @property def description(self): return self.search_description or '' @property def truncated_title(self):" ]
[ "EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment", "How detected emojis map to sentiment score emoji_to_sentiment = { # very strongly", "axis=1) / np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b, axis=1))) def _get_emojis():", "= _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items():", ":notes: :flushed: \" + \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \" + \\", "# very strongly negative ':unamused:': -1, ':angry:': -1, # removing ':hand:': -1 due", "-0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, #", "removing ':hand:': -1 due to ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier():", "positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5,", "+ \\ \":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun: \" + \\", ":flushed: \" + \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts:", "':angry:': -1, # removing ':hand:': -1 due to ambiguity ':rage:': -1 } return", ":sunglasses: :rage: :thumbsup: :cry: \" + \\ \":sleepy: :yum: :triumph: :hand: :mask: :clap:", "positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75,", ":clap: :eyes: :gun: \" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\", "0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative", "axis=1) * np.sum(b * b, axis=1))) def _get_emojis(): # All emojis in the", "':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:': 0.75, ':wink:': 0.75,", "+ \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note: :speak_no_evil: 
:wink:", "the order returned by deepmoji EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive:", ":expressionless: :sweat_smile: :pray: \" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\", "def cosine_similarity(a, b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a, axis=1)", ":smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" +", "\\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull:", ":sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" + \\", "emojis map to sentiment score emoji_to_sentiment = { # very strongly positive ':kissing_heart:':", "by deepmoji EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \" + \\", "\" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note: :speak_no_evil:", "np.sum(b * b, axis=1))) def _get_emojis(): # All emojis in the order returned", ":purple_heart: \" + \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return", "':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:':", "# positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:':", "detected emojis map to sentiment score emoji_to_sentiment = { # very strongly positive", ":neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face: :v: :sunglasses:", ":skull: \" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle:", "\":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\", "# strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, 
':heart_eyes:': 0.75,", "_get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji) sentiment_multiplier[loc]", ":heart_eyes: :pensive: \" + \\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: \"", ":grin: :notes: :flushed: \" + \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \" +", "= _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji)", "-1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier", "All emojis in the order returned by deepmoji EMOJIS = \":joy: :unamused: :weary:", "b, axis=1) / np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b, axis=1))) def", "EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \" + \\ \":ok_hand: :blush:", "# strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very", "_get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc", "':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:':", "\":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): #", "negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly", "def _get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment score emoji_to_sentiment = {", ":blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How", ":blush: :heart: :smirk: :grin: :notes: :flushed: \" + \\ \":100: 
:sleeping: :relieved: :relaxed:", "emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc =", "<filename>src/metric_helpers.py import numpy as np def cosine_similarity(a, b): return np.sum(a * b, axis=1)", "\" + \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \" + \\ \":sleepy:", "-0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative ':unamused:': -1,", ":sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis", "emoji_to_sentiment = { # very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1,", "-0.75, # very strongly negative ':unamused:': -1, ':angry:': -1, # removing ':hand:': -1", "order returned by deepmoji EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \"", "* np.sum(b * b, axis=1))) def _get_emojis(): # All emojis in the order", "positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:':", "np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b, axis=1))) def _get_emojis(): # All", "0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5,", "map to sentiment score emoji_to_sentiment = { # very strongly positive ':kissing_heart:': 1,", "':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, #", "-0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:':", ":sob: :heart_eyes: :pensive: \" + \\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed:", "':confounded:': -0.75, # very strongly negative ':unamused:': -1, ':angry:': -1, # removing ':hand:':", "\\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: 
\"", "+ \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS def", "_get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val", "score emoji_to_sentiment = { # very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:':", ":thumbsup: :cry: \" + \\ \":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun:", "':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:':", "-1 due to ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS =", "\\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict():", ":pensive: \" + \\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: \" +", "/ np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b, axis=1))) def _get_emojis(): #", "\":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \"", "':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:':", ":v: :sunglasses: :rage: :thumbsup: :cry: \" + \\ \":sleepy: :yum: :triumph: :hand: :mask:", "np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b,", "# How detected emojis map to sentiment score emoji_to_sentiment = { # very", "\\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray:", "strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly positive", ":grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected", "\":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note: 
:speak_no_evil: :wink: :skull: \"", "\\ \":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun: \" + \\ \":persevere:", "':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:':", "_get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment score emoji_to_sentiment = { #", "0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5,", "\":100: :sleeping: :relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \"", "\\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \" + \\ \":sleepy: :yum: :triumph:", "EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment score emoji_to_sentiment =", "':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative ':unamused:': -1, ':angry:': -1, #", "np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji) sentiment_multiplier[loc] = reward_val return", ":stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart:", "to sentiment score emoji_to_sentiment = { # very strongly positive ':kissing_heart:': 1, ':thumbsup:':", ":mask: :clap: :eyes: :gun: \" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" +", "\" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle: :facepunch:", "':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:':", ":eyes: :gun: \" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart:", "\\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle: :facepunch: :purple_heart: \"", ":pray: \" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person: 
:disappointed:", "':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative", "= EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map to", "axis=1))) def _get_emojis(): # All emojis in the order returned by deepmoji EMOJIS", "+ \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle: :facepunch: :purple_heart:", "\" + \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ') return EMOJIS", "= { # very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:':", "\\ \":no_good: :muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS", "def _get_emojis(): # All emojis in the order returned by deepmoji EMOJIS =", "\" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil:", "':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:':", "+ \\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye:", "strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly", ":rage: :thumbsup: :cry: \" + \\ \":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes:", "':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative ':unamused:':", ":cry: \" + \\ \":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun: \"", "EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map", ":relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\ \":confused:", "':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:': 0.75, 
':wink:': 0.75, ':muscle:': 0.75,", "0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5,", "0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5,", "':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict()", "+ \\ \":no_good: :muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\"", "-1, # removing ':hand:': -1 due to ambiguity ':rage:': -1 } return emoji_to_sentiment", "+ \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \" + \\ \":sleepy: :yum:", ":sweat_smile: :pray: \" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person:", "+ \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face:", "-0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:':", "':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, #", "':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative ':unamused:': -1, ':angry:':", "cosine_similarity(a, b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a, axis=1) *", "return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS))", "0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5,", "EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in", ":broken_heart: \" + \\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" 
+ \\ \":confounded:", "-0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:':", "':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:':", "ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment =", "* b, axis=1) / np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b, axis=1)))", "':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:':", "1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:': 0.75, ':wink:':", "1, ':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:':", "0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive", "numpy as np def cosine_similarity(a, b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a", "import numpy as np def cosine_similarity(a, b): return np.sum(a * b, axis=1) /", "\":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun: \" + \\ \":persevere: :smiling_imp:", "\":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: \" + \\ \":100: :sleeping: :relieved:", ":see_no_evil: \" + \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \" + \\", "1, # strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:':", "-1, ':angry:': -1, # removing ':hand:': -1 due to ambiguity ':rage:': -1 }", ":angry: \" + \\ \":no_good: :muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart: :blue_heart:", "b, axis=1))) def _get_emojis(): # All emojis in the order returned by deepmoji", "negative ':unamused:': -1, ':angry:': -1, # removing ':hand:': -1 due to ambiguity ':rage:':", "_get_emojis(): # All emojis in the order returned by deepmoji EMOJIS = \":joy:", ":triumph: :hand: :mask: :clap: 
:eyes: :gun: \" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart:", "') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment score", "negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative", "+ \\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup:", ":hand: :mask: :clap: :eyes: :gun: \" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \"", "def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji,", "0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5,", "{ # very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1,", ":kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face:", ":smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle: :facepunch: :purple_heart: \" + \\", "+ \\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: \" + \\ \":100:", "':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:':", "very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly", "deepmoji EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \" + \\ \":ok_hand:", "0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5,", "# All emojis in the order returned by deepmoji EMOJIS = \":joy: :unamused:", "\\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: \" + \\ \":100: :sleeping:", "\" + \\ \":no_good: :muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart: 
:blue_heart: :grimacing:", "':unamused:': -1, ':angry:': -1, # removing ':hand:': -1 due to ambiguity ':rage:': -1", ":smirk: :grin: :notes: :flushed: \" + \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \"", ":relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\", "-0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative ':unamused:': -1, ':angry:': -1,", ":sleeping: :relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" +", "\" + \\ \":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun: \" +", "':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75,", "to ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment", "0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5,", ":wink: :skull: \" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good:", "+ \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile:", "\":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" +", ":yum: :triumph: :hand: :mask: :clap: :eyes: :gun: \" + \\ \":persevere: :smiling_imp: :sweat:", ":gun: \" + \\ \":persevere: :smiling_imp: :sweat: :broken_heart: \" + \\ \":yellow_heart: :musical_note:", ":muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS = EMOJIS.split('", "emojis in the order returned by deepmoji EMOJIS = \":joy: :unamused: :weary: :sob:", "\\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry:", "\":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" + \\ 
\":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \"", "as np def cosine_similarity(a, b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a *", ":weary: :sob: :heart_eyes: :pensive: \" + \\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes:", "sentiment score emoji_to_sentiment = { # very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1,", "':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75,", ":heart: :smirk: :grin: :notes: :flushed: \" + \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands:", "\":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \" + \\ \":sleepy: :yum: :triumph: :hand:", "0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5,", "very strongly negative ':unamused:': -1, ':angry:': -1, # removing ':hand:': -1 due to", "1, ':smile:': 1, # strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:':", "= np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji) sentiment_multiplier[loc] = reward_val", "for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji) sentiment_multiplier[loc] = reward_val return sentiment_multiplier", ":speak_no_evil: :wink: :skull: \" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\", "a, axis=1) * np.sum(b * b, axis=1))) def _get_emojis(): # All emojis in", "\\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry:", "* b, axis=1))) def _get_emojis(): # All emojis in the order returned by", "+ \\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil: \"", "# negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, #", 
"sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji) sentiment_multiplier[loc] =", "\" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\ \":confused: :kissing_heart: :heartbeat:", "\" + \\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face: :v: :sunglasses: :rage:", "':smile:': 1, # strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75,", "* a, axis=1) * np.sum(b * b, axis=1))) def _get_emojis(): # All emojis", ":musical_note: :speak_no_evil: :wink: :skull: \" + \\ \":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" +", "np def cosine_similarity(a, b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a,", "\":no_good: :muscle: :facepunch: :purple_heart: \" + \\ \":sparkling_heart: :blue_heart: :grimacing: :sparkles:\" EMOJIS =", ":raised_hands: \" + \\ \":two_hearts: :expressionless: :sweat_smile: :pray: \" + \\ \":confused: :kissing_heart:", "':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:': 0.75,", "# removing ':hand:': -1 due to ambiguity ':rage:': -1 } return emoji_to_sentiment def", "due to ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis()", "emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for", "-0.75, ':confounded:': -0.75, # very strongly negative ':unamused:': -1, ':angry:': -1, # removing", "\" + \\ \":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: \" + \\", "\\ \":confused: :kissing_heart: :heartbeat: :neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil: \" +", "# very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, #", "b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a, axis=1) * 
np.sum(b", "0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5,", ":unamused: :weary: :sob: :heart_eyes: :pensive: \" + \\ \":ok_hand: :blush: :heart: :smirk: :grin:", "\":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \" + \\ \":ok_hand: :blush: :heart: :smirk:", "\":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: \" + \\ \":no_good: :muscle: :facepunch: :purple_heart: \" +", "':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:':", "return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment score emoji_to_sentiment", "0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5,", "':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75,", "0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5,", "':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75,", "returned by deepmoji EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \" +", "-0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:':", "':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:':", "':hand:': -1 due to ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS", "0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5,", ":heartbeat: :neutral_face: \" + \\ \":information_desk_person: :disappointed: :see_no_evil: \" + \\ \":tired_face: :v:", ":facepunch: :purple_heart: \" + \\ \":sparkling_heart: :blue_heart: 
:grimacing: :sparkles:\" EMOJIS = EMOJIS.split(' ')", "strongly negative ':unamused:': -1, ':angry:': -1, # removing ':hand:': -1 due to ambiguity", "= \":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: \" + \\ \":ok_hand: :blush: :heart:", "\" + \\ \":100: :sleeping: :relieved: :relaxed: :raised_hands: \" + \\ \":two_hearts: :expressionless:", "\" + \\ \":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: \" + \\ \":confounded: :smile:", "} return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier =", "strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:':", "in the order returned by deepmoji EMOJIS = \":joy: :unamused: :weary: :sob: :heart_eyes:", ":disappointed: :see_no_evil: \" + \\ \":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: \" +", "':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:':", "return np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a, axis=1) * np.sum(b *" ]
[ "for i in range(l): for j in range(i + 1, l): if nums[j]", "selectSort(nums): l = len(nums) for i in range(l): for j in range(i +", "range(i + 1, l): if nums[j] < nums[i]: nums[i], nums[j] = nums[j], nums[i]", "len(nums) for i in range(l): for j in range(i + 1, l): if", "1, l): if nums[j] < nums[i]: nums[i], nums[j] = nums[j], nums[i] return nums", "def selectSort(nums): l = len(nums) for i in range(l): for j in range(i", "range(l): for j in range(i + 1, l): if nums[j] < nums[i]: nums[i],", "for j in range(i + 1, l): if nums[j] < nums[i]: nums[i], nums[j]", "= len(nums) for i in range(l): for j in range(i + 1, l):", "+ 1, l): if nums[j] < nums[i]: nums[i], nums[j] = nums[j], nums[i] return", "j in range(i + 1, l): if nums[j] < nums[i]: nums[i], nums[j] =", "i in range(l): for j in range(i + 1, l): if nums[j] <", "in range(l): for j in range(i + 1, l): if nums[j] < nums[i]:", "l = len(nums) for i in range(l): for j in range(i + 1,", "in range(i + 1, l): if nums[j] < nums[i]: nums[i], nums[j] = nums[j]," ]
[ "for key, value in dic.iteritems(): string = string.replace(key, str(value)) nums = [int(i) for", "in range(n-1): if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1] = -2 stack =", "= len(nums)# the length of the string for i in range(n-1): if nums[i]+1", "validBraces(string): braces = [\"(\",\")\",\"{\",\"}\",\"[\",\"]\"] labels = [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid =", "of the string for i in range(n-1): if nums[i]+1 == nums[i+1]: nums[i] =", "string for i in range(n-1): if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1] =", "if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1] = -2 stack = list(filter(lambda x:", "in string] n = len(nums)# the length of the string for i in", "= list(filter(lambda x: x >= 0, nums)) if len(stack) > 0: valid =", "[int(i) for i in string] n = len(nums)# the length of the string", "nums[i] = nums[i+1] = -2 stack = list(filter(lambda x: x >= 0, nums))", "valid = True for key, value in dic.iteritems(): string = string.replace(key, str(value)) nums", ">= 0, nums)) if len(stack) > 0: valid = all(stack[i]+1 == stack[-i-1] for", "stack = list(filter(lambda x: x >= 0, nums)) if len(stack) > 0: valid", "list(filter(lambda x: x >= 0, nums)) if len(stack) > 0: valid = all(stack[i]+1", "True for key, value in dic.iteritems(): string = string.replace(key, str(value)) nums = [int(i)", "= [\"(\",\")\",\"{\",\"}\",\"[\",\"]\"] labels = [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid = True for", "labels)) valid = True for key, value in dic.iteritems(): string = string.replace(key, str(value))", "[0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid = True for key, value in dic.iteritems():", "the string for i in range(n-1): if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1]", "= nums[i+1] = -2 stack = list(filter(lambda x: x >= 0, nums)) if", "str(value)) nums = [int(i) for i in string] n = len(nums)# the length", "range(n-1): if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1] = -2 stack = list(filter(lambda", "key, value in dic.iteritems(): string = 
string.replace(key, str(value)) nums = [int(i) for i", "= string.replace(key, str(value)) nums = [int(i) for i in string] n = len(nums)#", "nums[i+1] = -2 stack = list(filter(lambda x: x >= 0, nums)) if len(stack)", "def validBraces(string): braces = [\"(\",\")\",\"{\",\"}\",\"[\",\"]\"] labels = [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid", "if len(stack) > 0: valid = all(stack[i]+1 == stack[-i-1] for i in range(len(stack)/2))", "string] n = len(nums)# the length of the string for i in range(n-1):", "0, nums)) if len(stack) > 0: valid = all(stack[i]+1 == stack[-i-1] for i", "== nums[i+1]: nums[i] = nums[i+1] = -2 stack = list(filter(lambda x: x >=", "x >= 0, nums)) if len(stack) > 0: valid = all(stack[i]+1 == stack[-i-1]", "x: x >= 0, nums)) if len(stack) > 0: valid = all(stack[i]+1 ==", "len(nums)# the length of the string for i in range(n-1): if nums[i]+1 ==", "length of the string for i in range(n-1): if nums[i]+1 == nums[i+1]: nums[i]", "nums[i+1]: nums[i] = nums[i+1] = -2 stack = list(filter(lambda x: x >= 0,", "dic = dict(zip(braces, labels)) valid = True for key, value in dic.iteritems(): string", "for i in string] n = len(nums)# the length of the string for", "braces = [\"(\",\")\",\"{\",\"}\",\"[\",\"]\"] labels = [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid = True", "[\"(\",\")\",\"{\",\"}\",\"[\",\"]\"] labels = [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid = True for key,", "= True for key, value in dic.iteritems(): string = string.replace(key, str(value)) nums =", "dic.iteritems(): string = string.replace(key, str(value)) nums = [int(i) for i in string] n", "= [int(i) for i in string] n = len(nums)# the length of the", "i in string] n = len(nums)# the length of the string for i", "= [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid = True for key, value in", "> 0: valid = all(stack[i]+1 == stack[-i-1] for i in range(len(stack)/2)) return valid", "n = len(nums)# the length of the string for i in range(n-1): if", "nums[i]+1 == 
nums[i+1]: nums[i] = nums[i+1] = -2 stack = list(filter(lambda x: x", "len(stack) > 0: valid = all(stack[i]+1 == stack[-i-1] for i in range(len(stack)/2)) return", "for i in range(n-1): if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1] = -2", "the length of the string for i in range(n-1): if nums[i]+1 == nums[i+1]:", "= -2 stack = list(filter(lambda x: x >= 0, nums)) if len(stack) >", "nums)) if len(stack) > 0: valid = all(stack[i]+1 == stack[-i-1] for i in", "= dict(zip(braces, labels)) valid = True for key, value in dic.iteritems(): string =", "dict(zip(braces, labels)) valid = True for key, value in dic.iteritems(): string = string.replace(key,", "nums = [int(i) for i in string] n = len(nums)# the length of", "-2 stack = list(filter(lambda x: x >= 0, nums)) if len(stack) > 0:", "labels = [0,1,3,4,7,8] dic = dict(zip(braces, labels)) valid = True for key, value", "in dic.iteritems(): string = string.replace(key, str(value)) nums = [int(i) for i in string]", "string = string.replace(key, str(value)) nums = [int(i) for i in string] n =", "string.replace(key, str(value)) nums = [int(i) for i in string] n = len(nums)# the", "value in dic.iteritems(): string = string.replace(key, str(value)) nums = [int(i) for i in", "i in range(n-1): if nums[i]+1 == nums[i+1]: nums[i] = nums[i+1] = -2 stack" ]
[ "import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot()", "names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X = [i for i", "pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X = [i", "numpy.reshape(X, (len(X), 1)) y = data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend", "in range(0, len(data))] X = numpy.reshape(X, (len(X), 1)) y = data.values LModel =", "LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data, model='additive')", "print(data.describe()) data.plot() plt.show() X = [i for i in range(0, len(data))] X =", "print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X = [i for i in range(0, len(data))]", "trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov=", "(len(X), 1)) y = data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend =", "plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show()", "DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show() DecompDataMult = seasonal_decompose(data,", "print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) 
plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show()", "import matplotlib.pyplot as plt import numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import", "= seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show() DecompDataMult = seasonal_decompose(data, model='multiplicative')", "plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show() DecompDataMult", "data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X", "len(data))] X = numpy.reshape(X, (len(X), 1)) y = data.values LModel = LinearRegression() LModel.fit(X,", "from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime',", "model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show() DecompDataMult = seasonal_decompose(data, model='multiplicative') DecompDataMult.plot() plt.show()", "= data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend)", "plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show() DecompDataMult =", "data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show()", "import LinearRegression from statsmodels.tsa.seasonal import 
seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None)", "LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot()", "y = data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y)", "LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd", "i in range(0, len(data))] X = numpy.reshape(X, (len(X), 1)) y = data.values LModel", "import numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True,", "as plt import numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data", "LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info())", "plt.show() X = [i for i in range(0, len(data))] X = numpy.reshape(X, (len(X),", "matplotlib.pyplot as plt import numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose", "'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X = [i for i in", "range(0, len(data))] X = numpy.reshape(X, (len(X), 1)) y = data.values LModel = LinearRegression()", "pd import matplotlib.pyplot as plt import numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal", "y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() 
DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot()", "for i in range(0, len(data))] X = numpy.reshape(X, (len(X), 1)) y = data.values", "= numpy.reshape(X, (len(X), 1)) y = data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_)", "sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'],", "from statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head())", "index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X = [i for", "print(data.head()) print(data.describe()) data.plot() plt.show() X = [i for i in range(0, len(data))] X", "data.plot() plt.show() X = [i for i in range(0, len(data))] X = numpy.reshape(X,", "plt import numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data =", "seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show()", "import pandas as pd import matplotlib.pyplot as plt import numpy from sklearn.linear_model import", "X = [i for i in range(0, len(data))] X = numpy.reshape(X, (len(X), 1))", "= LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal", "statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], 
header=None) print(data.info()) print(data.head()) print(data.describe())", "<gh_stars>10-100 import pandas as pd import matplotlib.pyplot as plt import numpy from sklearn.linear_model", "[i for i in range(0, len(data))] X = numpy.reshape(X, (len(X), 1)) y =", "seasonal_decompose(data, model='additive') DecompDataAdd.plot() plt.show() SeasRemov= data-DecompDataAdd.seasonal SeasRemov.plot() plt.show() DecompDataMult = seasonal_decompose(data, model='multiplicative') DecompDataMult.plot()", "= LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd =", "1)) y = data.values LModel = LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X)", "LinearRegression() LModel.fit(X, y) print(LModel.intercept_,LModel.coef_) trend = LModel.predict(X) plt.plot(y) plt.plot(trend) plt.show() DecompDataAdd = seasonal_decompose(data,", "as pd import matplotlib.pyplot as plt import numpy from sklearn.linear_model import LinearRegression from", "X = numpy.reshape(X, (len(X), 1)) y = data.values LModel = LinearRegression() LModel.fit(X, y)", "numpy from sklearn.linear_model import LinearRegression from statsmodels.tsa.seasonal import seasonal_decompose data = pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime',", "pandas as pd import matplotlib.pyplot as plt import numpy from sklearn.linear_model import LinearRegression", "= [i for i in range(0, len(data))] X = numpy.reshape(X, (len(X), 1)) y", "header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X = [i for i in range(0,", "= pd.read_csv('milk-production-pounds.csv',parse_dates=True, index_col='DateTime', names=['DateTime', 'Milk'], header=None) print(data.info()) print(data.head()) print(data.describe()) data.plot() plt.show() X =" ]
[ "from distutils.core import setup setup( name='tensorcv', version=0.1, packages=['tensorcv'], entry_points=\"\"\" [console_scripts] tcv=tensorcv:cli \"\"\" )", "<reponame>afcarl/tensorcv<gh_stars>1-10 from distutils.core import setup setup( name='tensorcv', version=0.1, packages=['tensorcv'], entry_points=\"\"\" [console_scripts] tcv=tensorcv:cli \"\"\"" ]
[ "Call the base class constructor with the parameters it needs super(EarlyTerminationException, self).__init__(message) self.metrics", "# For BO early Termination class EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call", "# Call the base class constructor with the parameters it needs super(EarlyTerminationException, self).__init__(message)", "BO early Termination class EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call the base", "early Termination class EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call the base class", "__init__(self, message, metrics): # Call the base class constructor with the parameters it", "message, metrics): # Call the base class constructor with the parameters it needs", "the base class constructor with the parameters it needs super(EarlyTerminationException, self).__init__(message) self.metrics =", "Termination class EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call the base class constructor", "metrics): # Call the base class constructor with the parameters it needs super(EarlyTerminationException,", "class EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call the base class constructor with", "def __init__(self, message, metrics): # Call the base class constructor with the parameters", "EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call the base class constructor with the", "base class constructor with the parameters it needs super(EarlyTerminationException, self).__init__(message) self.metrics = metrics", "For BO early Termination class EarlyTerminationException(Exception): def __init__(self, message, metrics): # Call the" ]
[ "off as \"two 1s\" or 21. #21 is read off as \"one 2,", "21, 1211, 111221, ... # #1 is read off as \"one 1\" or", "# #1 is read off as \"one 1\" or 11. #11 is read", "read off as \"two 1s\" or 21. #21 is read off as \"one", "str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if", "21. #21 is read off as \"one 2, then one 1\" or 1211.", "is read off as \"one 2, then one 1\" or 1211. #Given an", "#The count-and-say sequence is the sequence of integers beginning as follows: #1, 11,", "xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j] c=1 s+=str(c)+n", "i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j]", "read off as \"one 2, then one 1\" or 1211. #Given an integer", "s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j] c=1 s+=str(c)+n z.append(s)", "n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j", "then one 1\" or 1211. #Given an integer n, generate the nth sequence.", "follows: #1, 11, 21, 1211, 111221, ... # #1 is read off as", "read off as \"one 1\" or 11. #11 is read off as \"two", "is the sequence of integers beginning as follows: #1, 11, 21, 1211, 111221,", "off as \"one 2, then one 1\" or 1211. #Given an integer n,", "as \"two 1s\" or 21. #21 is read off as \"one 2, then", "as \"one 2, then one 1\" or 1211. #Given an integer n, generate", "Solution(object): def countAndSay(self, A): \"\"\" :type n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for", ":type n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for", "beginning as follows: #1, 11, 21, 1211, 111221, ... 
# #1 is read", "j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j] c=1 s+=str(c)+n z.append(s) return z[A]", "sequence is the sequence of integers beginning as follows: #1, 11, 21, 1211,", "2, then one 1\" or 1211. #Given an integer n, generate the nth", "is read off as \"two 1s\" or 21. #21 is read off as", "#1, 11, 21, 1211, 111221, ... # #1 is read off as \"one", "def countAndSay(self, A): \"\"\" :type n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for i", "11, 21, 1211, 111221, ... # #1 is read off as \"one 1\"", "class Solution(object): def countAndSay(self, A): \"\"\" :type n: int :rtype: str \"\"\" z=[\"0\",\"1\"]", "as \"one 1\" or 11. #11 is read off as \"two 1s\" or", "A): \"\"\" :type n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1):", "z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else:", "integer n, generate the nth sequence. class Solution(object): def countAndSay(self, A): \"\"\" :type", "countAndSay(self, A): \"\"\" :type n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for i in", "the sequence of integers beginning as follows: #1, 11, 21, 1211, 111221, ...", "for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n", "or 1211. #Given an integer n, generate the nth sequence. class Solution(object): def", ":rtype: str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])):", "1211, 111221, ... # #1 is read off as \"one 1\" or 11.", "#21 is read off as \"one 2, then one 1\" or 1211. #Given", "in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j] c=1", "as follows: #1, 11, 21, 1211, 111221, ... # #1 is read off", "count-and-say sequence is the sequence of integers beginning as follows: #1, 11, 21,", "1\" or 1211. #Given an integer n, generate the nth sequence. 
class Solution(object):", "n, generate the nth sequence. class Solution(object): def countAndSay(self, A): \"\"\" :type n:", "... # #1 is read off as \"one 1\" or 11. #11 is", "1\" or 11. #11 is read off as \"two 1s\" or 21. #21", "of integers beginning as follows: #1, 11, 21, 1211, 111221, ... # #1", "111221, ... # #1 is read off as \"one 1\" or 11. #11", "1s\" or 21. #21 is read off as \"one 2, then one 1\"", "int :rtype: str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in", "sequence. class Solution(object): def countAndSay(self, A): \"\"\" :type n: int :rtype: str \"\"\"", "\"one 2, then one 1\" or 1211. #Given an integer n, generate the", "\"two 1s\" or 21. #21 is read off as \"one 2, then one", "\"one 1\" or 11. #11 is read off as \"two 1s\" or 21.", "the nth sequence. class Solution(object): def countAndSay(self, A): \"\"\" :type n: int :rtype:", "an integer n, generate the nth sequence. class Solution(object): def countAndSay(self, A): \"\"\"", "#11 is read off as \"two 1s\" or 21. #21 is read off", "1211. #Given an integer n, generate the nth sequence. class Solution(object): def countAndSay(self,", "off as \"one 1\" or 11. #11 is read off as \"two 1s\"", "or 21. #21 is read off as \"one 2, then one 1\" or", "generate the nth sequence. class Solution(object): def countAndSay(self, A): \"\"\" :type n: int", "one 1\" or 1211. #Given an integer n, generate the nth sequence. class", "\"\"\" :type n: int :rtype: str \"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1", "#Given an integer n, generate the nth sequence. class Solution(object): def countAndSay(self, A):", "\"\"\" z=[\"0\",\"1\"] for i in xrange(1,A+1): s,n,c=\"\",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1", "is read off as \"one 1\" or 11. #11 is read off as", "sequence of integers beginning as follows: #1, 11, 21, 1211, 111221, ... #", "11. #11 is read off as \"two 1s\" or 21. 
#21 is read", "integers beginning as follows: #1, 11, 21, 1211, 111221, ... # #1 is", "nth sequence. class Solution(object): def countAndSay(self, A): \"\"\" :type n: int :rtype: str", "for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j] c=1 s+=str(c)+n z.append(s) return", "or 11. #11 is read off as \"two 1s\" or 21. #21 is", "#1 is read off as \"one 1\" or 11. #11 is read off" ]
[ "shape has fixed length here. if self.shape.sym_val is None: shape = tuple([get_new_symbol() for", "InputSpec(shape=IntTensorInputType(),) def __init__(self, **kwargs): super(RandomDistribution, self).__init__(**kwargs) def type_inference(self): if any_symbolic(self.shape.shape): # We can't", "(event probabilities). The first N - 1 dimensions specifies distributions, the last dimension", "from a normal distribution. .. math:: f(x) = \\frac{\\exp(-x^2/2)}{\\sqrt{2\\pi}} for a real number", "normal distribution. .. math:: p(x) = \\frac{1}{high - low} for a real number", "0 for k = 0,..., K-1. mean: const<f32>, optional The mean (center) of", "def type_inference(self): output_shape = self.x.shape[:-1] + (self.size.val,) return types.tensor(types.fp32, output_shape) @register_op( doc_str=r\"\"\" Returns", "to 1. seed: const<i32>, optional Seed to create a reproducible sequence of values", "target output shape filled with random values. See Also -------- random_categorical, random_normal, random_uniform", "K-1. mean: const<f32>, optional The mean (center) of the normal distribution. Defaults to", ":math:`x`. Parameters ---------- shape: <K, i32>, required Target output tensor shape. K is", "last dimension represents a vector of probabilities. mode: const<str>, optional One of ['logits',", "filled with random values. See Also -------- random_categorical, random_normal, random_uniform \"\"\" ) class", "return types.tensor(types.fp32, shape) return types.tensor(types.fp32, tuple(self.shape.sym_val.tolist())) \"\"\" Random Op Implementation(s) \"\"\" @register_op( doc_str=r\"\"\"", "(c) 2020, Apple Inc. All rights reserved. # # Use of this source", "default=1.0), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_normal, self).__init__(**kwargs) @register_op(", "of values across multiple invokes. 
Returns ------- <*D_in[:-1] + [size], T>, a tensor", "from ._op_reqs import * \"\"\" Random Op Superclass \"\"\" class RandomDistribution(Operation): input_spec =", "Defaults to 1. seed: const<i32>, optional Seed to create a reproducible sequence of", ") + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_bernoulli, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns random", "f(k) = \\begin{cases}1-p &\\text{if } k = 0\\\\ p &\\text{if } k =", "Random Op Superclass \"\"\" class RandomDistribution(Operation): input_spec = InputSpec(shape=IntTensorInputType(),) def __init__(self, **kwargs): super(RandomDistribution,", "with random values. See Also -------- random_categorical, random_bernoulli, random_uniform \"\"\" ) class random_normal(RandomDistribution):", "optional Upper boundary of the output interval (exclusive). Defaults to 1.0. seed: const<i32>,", "has variable length. return types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has fixed length here. if", "See Also -------- random_categorical, random_bernoulli, random_normal \"\"\" ) class random_uniform(RandomDistribution): input_spec = (", "= ( InputSpec( shape=IntTensorInputType(), low=FloatInputType(const=True, default=0.0), high=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec", "See Also -------- random_categorical, random_bernoulli, random_uniform \"\"\" ) class random_normal(RandomDistribution): input_spec = (", "a normal distribution. .. math:: f(x) = \\frac{\\exp(-x^2/2)}{\\sqrt{2\\pi}} for a real number :math:`x`.", "= 0\\\\ p &\\text{if } k = 1\\end{cases} for :math:`k` in :math:`\\{0, 1\\}`.", "values from a normal distribution. .. math:: f(x) = \\frac{\\exp(-x^2/2)}{\\sqrt{2\\pi}} for a real", "output shape filled with random values. See Also -------- random_categorical, random_normal, random_uniform \"\"\"", "with random values from a normal distribution. .. 
class RandomDistribution(Operation):
    """Abstract base for the random-sampling ops in this module.

    Declares the shared ``shape`` input (the target output tensor shape,
    a rank-1 integer tensor) and derives the op's output type from it.
    """

    input_spec = InputSpec(shape=IntTensorInputType(),)

    def __init__(self, **kwargs):
        super(RandomDistribution, self).__init__(**kwargs)

    def type_inference(self):
        # If even the shape of `shape` is symbolic, the output rank itself is
        # unknown: the best we can report is a variadic fp32 tensor.
        if any_symbolic(self.shape.shape):
            return types.tensor(types.fp32, (get_new_variadic_symbol(),))

        sym_val = self.shape.sym_val
        if sym_val is not None:
            # Target shape is fully known at compile time.
            return types.tensor(types.fp32, tuple(sym_val.tolist()))

        # Rank is known but the individual dimensions are not: emit one
        # fresh symbol per output dimension.
        rank = self.shape.shape[0]
        dims = tuple(get_new_symbol() for _ in range(rank))
        return types.tensor(types.fp32, dims)
@register_op(
    doc_str=r"""
Returns a tensor with specified shape with random values from a Bernoulli distribution.

.. math::
    f(k) = \begin{cases}1-p  &\text{if } k = 0\\
                        p    &\text{if } k = 1\end{cases}

for :math:`k` in :math:`\{0, 1\}`.

Parameters
----------
shape: <K, i32>, required
    Target output tensor shape.
    K is the rank of the output tensor. shape[k] > 0 for k = 0,..., K-1.
prob: const<f32>, optional
    The probability of sampling 1. Defaults to 0.5.
seed: const<i32>, optional
    Seed to create a reproducible sequence of values across multiple invokes.

Returns
-------
<*, T>, a tensor of given target output shape filled with random values.

See Also
--------
random_categorical, random_normal, random_uniform
"""
)
class random_bernoulli(RandomDistribution):
    # Op-specific inputs (prob, seed) are combined with the shared `shape`
    # input declared on RandomDistribution; type inference is inherited.
    input_spec = (
        InputSpec(
            shape=IntTensorInputType(),
            prob=FloatInputType(const=True, default=0.5),
            seed=IntInputType(const=True, default=-1),  # -1: no fixed seed
        )
        + RandomDistribution.input_spec
    )

    def __init__(self, **kwargs):
        super(random_bernoulli, self).__init__(**kwargs)
@register_op(
    doc_str=r"""
Returns random values from a categorical distribution.

Parameters
----------
x: <*D_in, T>
    N-dimensional tensor, one of logits (event log-probabilities) or probs
    (event probabilities). The first N - 1 dimensions specifies distributions,
    the last dimension represents a vector of probabilities.
mode: const<str>, optional
    One of ['logits', 'probs']. Defaults to 'logits'.
size: const<i32>, optional
    Number of samples to draw. Defaults to 1.
seed: const<i32>, optional
    Seed to create a reproducible sequence of values across multiple invokes.

Returns
-------
<*D_in[:-1] + [size], T>, a tensor of given target output shape filled with random values.

See Also
--------
random_bernoulli, random_normal, random_uniform
"""
)
class random_categorical(Operation):
    # NOTE: not a RandomDistribution subclass -- the output shape is derived
    # from the input tensor `x` rather than from an explicit `shape` input.
    # Fixed: doc_str previously documented the input as `shape`, but the op's
    # input is named `x` (see input_spec below).
    input_spec = InputSpec(
        x=TensorInputType(),
        mode=StringInputType(const=True, default="logits"),
        size=IntInputType(const=True, default=1),
        seed=IntInputType(const=True, default=-1),  # -1: no fixed seed
    )

    def __init__(self, **kwargs):
        super(random_categorical, self).__init__(**kwargs)

    def type_inference(self):
        # `size` samples are drawn per distribution: the last axis of `x`
        # (the probability vector) is replaced by the sample count.
        output_shape = self.x.shape[:-1] + (self.size.val,)
        return types.tensor(types.fp32, output_shape)
@register_op(
    doc_str=r"""
Returns a tensor with specified shape with random values from a normal distribution.

.. math::
    f(x) = \frac{\exp(-x^2/2)}{\sqrt{2\pi}}

for a real number :math:`x`.

Parameters
----------
shape: <K, i32>, required
    Target output tensor shape.
    K is the rank of the output tensor. shape[k] > 0 for k = 0,..., K-1.
mean: const<f32>, optional
    The mean (center) of the normal distribution. Defaults to 0.0.
stddev: const<f32>, optional
    The standard deviation (width) of the normal distribution. Defaults to 1.0.
seed: const<i32>, optional
    Seed to create a reproducible sequence of values across multiple invokes.

Returns
-------
<*, T>, a tensor of given target output shape filled with random values.

See Also
--------
random_categorical, random_bernoulli, random_uniform
"""
)
class random_normal(RandomDistribution):
    # Op-specific inputs (mean, stddev, seed) combined with the shared `shape`
    # input from RandomDistribution; type inference is inherited.
    input_spec = (
        InputSpec(
            shape=IntTensorInputType(),
            mean=FloatInputType(const=True, default=0.0),
            stddev=FloatInputType(const=True, default=1.0),
            seed=IntInputType(const=True, default=-1),  # -1: no fixed seed
        )
        + RandomDistribution.input_spec
    )

    def __init__(self, **kwargs):
        super(random_normal, self).__init__(**kwargs)
@register_op(
    doc_str=r"""
Returns a tensor with specified shape with random values from a uniform distribution.

.. math::
    p(x) = \frac{1}{high - low}

for a real number :math:`x`.

Parameters
----------
shape: <K, i32>, required
    Target output tensor shape.
    K is the rank of the output tensor. shape[k] > 0 for k = 0,..., K-1.
low: const<f32>, optional
    Lower boundary of the output interval (inclusive). Defaults to 0.0.
high: const<f32>, optional
    Upper boundary of the output interval (exclusive). Defaults to 1.0.
seed: const<i32>, optional
    Seed to create a reproducible sequence of values across multiple invokes.

Returns
-------
<*, T>, a tensor of given target output shape filled with random values.

See Also
--------
random_categorical, random_bernoulli, random_normal
"""
)
class random_uniform(RandomDistribution):
    # Fixed: doc_str previously said "normal distribution", but the density
    # p(x) = 1/(high - low) and the low/high parameters describe a uniform
    # distribution.
    # Op-specific inputs (low, high, seed) combined with the shared `shape`
    # input from RandomDistribution; type inference is inherited.
    input_spec = (
        InputSpec(
            shape=IntTensorInputType(),
            low=FloatInputType(const=True, default=0.0),
            high=FloatInputType(const=True, default=1.0),
            seed=IntInputType(const=True, default=-1),  # -1: no fixed seed
        )
        + RandomDistribution.input_spec
    )

    def __init__(self, **kwargs):
        super(random_uniform, self).__init__(**kwargs)
Defaults to", "in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import any_symbolic from coremltools.converters.mil.mil", "One of ['logits', 'probs']. Defaults to 'logits'. size: const<i32>, optional Number of samples", "RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_bernoulli, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns random values from", "(event log-probabilities) or probs (event probabilities). The first N - 1 dimensions specifies", "See Also -------- random_categorical, random_normal, random_uniform \"\"\" ) class random_bernoulli(RandomDistribution): input_spec = (", "Returns ------- <*, T>, a tensor of given target output shape filled with", "shape[k] > 0 for k = 0,..., K-1. prob: const<f32>, optional The probability", "Returns ------- <*D_in[:-1] + [size], T>, a tensor of given target output shape", "reserved. # # Use of this source code is governed by a BSD-3-clause", "specified shape with random values from a Bernoulli distribution. .. math:: f(k) =", "output interval (exclusive). Defaults to 1.0. seed: const<i32>, optional Seed to create a", "def type_inference(self): if any_symbolic(self.shape.shape): # We can't infer any shape if shape has", "random values. See Also -------- random_categorical, random_bernoulli, random_normal \"\"\" ) class random_uniform(RandomDistribution): input_spec", "with random values from a Bernoulli distribution. .. math:: f(k) = \\begin{cases}1-p &\\text{if", "distribution. Defaults to 0.0. stddev: const<f32>, optional The standard deviation (width) of the", "variable length. return types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has fixed length here. if self.shape.sym_val", "Seed to create a reproducible sequence of values across multiple invokes. Returns -------", "> 0 for k = 0,..., K-1. 
prob: const<f32>, optional The probability of", "const<i32>, optional Seed to create a reproducible sequence of values across multiple invokes.", "def __init__(self, **kwargs): super(RandomDistribution, self).__init__(**kwargs) def type_inference(self): if any_symbolic(self.shape.shape): # We can't infer", "k = 1\\end{cases} for :math:`k` in :math:`\\{0, 1\\}`. Parameters ---------- shape: <K, i32>,", "Upper boundary of the output interval (exclusive). Defaults to 1.0. seed: const<i32>, optional", "coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol from ._op_reqs import * \"\"\" Random Op Superclass \"\"\"", "random values from a Bernoulli distribution. .. math:: f(k) = \\begin{cases}1-p &\\text{if }", "(width) of the normal distribution. Defaults to 1.0. seed: const<i32>, optional Seed to", "random_categorical(Operation): input_spec = InputSpec( x=TensorInputType(), mode=StringInputType(const=True, default=\"logits\"), size=IntInputType(const=True, default=1), seed=IntInputType(const=True, default=-1), ) def", "self.shape.sym_val is None: shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])]) return types.tensor(types.fp32, shape)", "that can be # found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause from", "values. See Also -------- random_categorical, random_normal, random_uniform \"\"\" ) class random_bernoulli(RandomDistribution): input_spec =", "class random_normal(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), mean=FloatInputType(const=True, default=0.0), stddev=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1),", "super(random_normal, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with random values", "Apple Inc. All rights reserved. # # Use of this source code is", "log-probabilities) or probs (event probabilities). 
The first N - 1 dimensions specifies distributions,", "the output interval (exclusive). Defaults to 1.0. seed: const<i32>, optional Seed to create", "be # found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import", "https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import any_symbolic from coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol from ._op_reqs import", "Returns random values from a categorical distribution. Parameters ---------- shape: <*D_in, T> N-dimensional", "shape: <*D_in, T> N-dimensional tensor, one of logits (event log-probabilities) or probs (event", "of given target output shape filled with random values. See Also -------- random_categorical,", "<*D_in, T> N-dimensional tensor, one of logits (event log-probabilities) or probs (event probabilities).", "RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_normal, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns a tensor with", "optional The mean (center) of the normal distribution. Defaults to 0.0. stddev: const<f32>,", "shape[k] > 0 for k = 0,..., K-1. low: const<f32>, optional Lower boundary", "return types.tensor(types.fp32, output_shape) @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with random", "interval (inclusive). Defaults to 0.0. high: const<f32>, optional Upper boundary of the output", "random_categorical, random_bernoulli, random_normal \"\"\" ) class random_uniform(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), low=FloatInputType(const=True,", "output tensor. shape[k] > 0 for k = 0,..., K-1. 
prob: const<f32>, optional", "in range(self.shape.shape[0])]) return types.tensor(types.fp32, shape) return types.tensor(types.fp32, tuple(self.shape.sym_val.tolist())) \"\"\" Random Op Implementation(s) \"\"\"", "source code is governed by a BSD-3-clause license that can be # found", "here. if self.shape.sym_val is None: shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])]) return", "(get_new_variadic_symbol(),)) # shape has fixed length here. if self.shape.sym_val is None: shape =", "tensor shape. K is the rank of the output tensor. shape[k] > 0", "length. return types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has fixed length here. if self.shape.sym_val is", "types.tensor(types.fp32, output_shape) @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with random values", "shape=IntTensorInputType(), low=FloatInputType(const=True, default=0.0), high=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self,", "random values from a categorical distribution. Parameters ---------- shape: <*D_in, T> N-dimensional tensor,", "invokes. Returns ------- <*D_in[:-1] + [size], T>, a tensor of given target output", "---------- shape: <K, i32>, required Target output tensor shape. K is the rank", "has fixed length here. if self.shape.sym_val is None: shape = tuple([get_new_symbol() for _", "Parameters ---------- shape: <*D_in, T> N-dimensional tensor, one of logits (event log-probabilities) or", "a vector of probabilities. mode: const<str>, optional One of ['logits', 'probs']. Defaults to", ":math:`k` in :math:`\\{0, 1\\}`. 
Parameters ---------- shape: <K, i32>, required Target output tensor", "tuple(self.shape.sym_val.tolist())) \"\"\" Random Op Implementation(s) \"\"\" @register_op( doc_str=r\"\"\" Returns a tensor with specified", "math:: f(k) = \\begin{cases}1-p &\\text{if } k = 0\\\\ p &\\text{if } k", "= tuple([get_new_symbol() for _ in range(self.shape.shape[0])]) return types.tensor(types.fp32, shape) return types.tensor(types.fp32, tuple(self.shape.sym_val.tolist())) \"\"\"", "-------- random_categorical, random_bernoulli, random_normal \"\"\" ) class random_uniform(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(),", "const<str>, optional One of ['logits', 'probs']. Defaults to 'logits'. size: const<i32>, optional Number", "math:: f(x) = \\frac{\\exp(-x^2/2)}{\\sqrt{2\\pi}} for a real number :math:`x`. Parameters ---------- shape: <K,", "class random_uniform(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), low=FloatInputType(const=True, default=0.0), high=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1),", "given target output shape filled with random values. See Also -------- random_categorical, random_normal,", "__init__(self, **kwargs): super(random_normal, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with", "shape. K is the rank of the output tensor. shape[k] > 0 for", ") def __init__(self, **kwargs): super(random_categorical, self).__init__(**kwargs) def type_inference(self): output_shape = self.x.shape[:-1] + (self.size.val,)", "const<f32>, optional Upper boundary of the output interval (exclusive). Defaults to 1.0. seed:", "import get_new_symbol, get_new_variadic_symbol from ._op_reqs import * \"\"\" Random Op Superclass \"\"\" class", "reproducible sequence of values across multiple invokes. Returns ------- <*D_in[:-1] + [size], T>,", "a tensor of given target output shape filled with random values. 
See Also", "coremltools.converters.mil.mil.types.symbolic import any_symbolic from coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol from ._op_reqs import * \"\"\"", "self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with random values from", "boundary of the output interval (exclusive). Defaults to 1.0. seed: const<i32>, optional Seed", "&\\text{if } k = 0\\\\ p &\\text{if } k = 1\\end{cases} for :math:`k`", "-------- random_bernoulli, random_normal, random_uniform \"\"\" ) class random_categorical(Operation): input_spec = InputSpec( x=TensorInputType(), mode=StringInputType(const=True,", "super(RandomDistribution, self).__init__(**kwargs) def type_inference(self): if any_symbolic(self.shape.shape): # We can't infer any shape if", "Parameters ---------- shape: <K, i32>, required Target output tensor shape. K is the", "Returns a tensor with specified shape with random values from a Bernoulli distribution.", "invokes. Returns ------- <*, T>, a tensor of given target output shape filled", "Lower boundary of the output interval (inclusive). Defaults to 0.0. high: const<f32>, optional", "tensor. shape[k] > 0 for k = 0,..., K-1. prob: const<f32>, optional The", "of logits (event log-probabilities) or probs (event probabilities). 
The first N - 1", "default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_normal, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns", "file or at https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import any_symbolic from coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol", "RandomDistribution(Operation): input_spec = InputSpec(shape=IntTensorInputType(),) def __init__(self, **kwargs): super(RandomDistribution, self).__init__(**kwargs) def type_inference(self): if any_symbolic(self.shape.shape):", "&\\text{if } k = 1\\end{cases} for :math:`k` in :math:`\\{0, 1\\}`. Parameters ---------- shape:", "= \\frac{1}{high - low} for a real number :math:`x`. Parameters ---------- shape: <K,", "1 dimensions specifies distributions, the last dimension represents a vector of probabilities. mode:", "any shape if shape has variable length. return types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has", ") class random_bernoulli(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True, default=-1), )", "(center) of the normal distribution. Defaults to 0.0. stddev: const<f32>, optional The standard", "# # Use of this source code is governed by a BSD-3-clause license", "def __init__(self, **kwargs): super(random_normal, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns a tensor with specified shape", "0,..., K-1. low: const<f32>, optional Lower boundary of the output interval (inclusive). 
Defaults", "low=FloatInputType(const=True, default=0.0), high=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs):", "return types.tensor(types.fp32, tuple(self.shape.sym_val.tolist())) \"\"\" Random Op Implementation(s) \"\"\" @register_op( doc_str=r\"\"\" Returns a tensor", "to 'logits'. size: const<i32>, optional Number of samples to draw. Defaults to 1.", "\"\"\" Random Op Superclass \"\"\" class RandomDistribution(Operation): input_spec = InputSpec(shape=IntTensorInputType(),) def __init__(self, **kwargs):", "+ RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_normal, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns a tensor", "shape=IntTensorInputType(), mean=FloatInputType(const=True, default=0.0), stddev=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self,", "random_uniform \"\"\" ) class random_bernoulli(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True,", "distributions, the last dimension represents a vector of probabilities. mode: const<str>, optional One", "default=0.0), stddev=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_normal,", "None: shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])]) return types.tensor(types.fp32, shape) return types.tensor(types.fp32,", "# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import any_symbolic", "\\frac{1}{high - low} for a real number :math:`x`. Parameters ---------- shape: <K, i32>,", "create a reproducible sequence of values across multiple invokes. 
Returns ------- <*, T>,", "( InputSpec( shape=IntTensorInputType(), prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self,", "the output tensor. shape[k] > 0 for k = 0,..., K-1. low: const<f32>,", "distribution. .. math:: f(x) = \\frac{\\exp(-x^2/2)}{\\sqrt{2\\pi}} for a real number :math:`x`. Parameters ----------", "normal distribution. Defaults to 1.0. seed: const<i32>, optional Seed to create a reproducible", "types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has fixed length here. if self.shape.sym_val is None: shape", ") class random_uniform(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), low=FloatInputType(const=True, default=0.0), high=FloatInputType(const=True, default=1.0), seed=IntInputType(const=True,", "def __init__(self, **kwargs): super(random_bernoulli, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns random values from a categorical", "= 0,..., K-1. low: const<f32>, optional Lower boundary of the output interval (inclusive).", "InputSpec( shape=IntTensorInputType(), prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs):", "a real number :math:`x`. Parameters ---------- shape: <K, i32>, required Target output tensor", "0\\\\ p &\\text{if } k = 1\\end{cases} for :math:`k` in :math:`\\{0, 1\\}`. 
Parameters", "- 1 dimensions specifies distributions, the last dimension represents a vector of probabilities.", "if any_symbolic(self.shape.shape): # We can't infer any shape if shape has variable length.", "-------- random_categorical, random_bernoulli, random_uniform \"\"\" ) class random_normal(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(),", "\"\"\" ) class random_normal(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), mean=FloatInputType(const=True, default=0.0), stddev=FloatInputType(const=True, default=1.0),", "boundary of the output interval (inclusive). Defaults to 0.0. high: const<f32>, optional Upper", "Implementation(s) \"\"\" @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with random values", "if self.shape.sym_val is None: shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])]) return types.tensor(types.fp32,", "with specified shape with random values from a Bernoulli distribution. .. math:: f(k)", "tensor with specified shape with random values from a Bernoulli distribution. .. math::", "real number :math:`x`. Parameters ---------- shape: <K, i32>, required Target output tensor shape.", "0.0. stddev: const<f32>, optional The standard deviation (width) of the normal distribution. Defaults", "given target output shape filled with random values. See Also -------- random_categorical, random_bernoulli,", "at https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import any_symbolic from coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol from ._op_reqs", "N-dimensional tensor, one of logits (event log-probabilities) or probs (event probabilities). 
The first", "prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_bernoulli, self).__init__(**kwargs)", "p(x) = \\frac{1}{high - low} for a real number :math:`x`. Parameters ---------- shape:", "the output interval (inclusive). Defaults to 0.0. high: const<f32>, optional Upper boundary of", "See Also -------- random_bernoulli, random_normal, random_uniform \"\"\" ) class random_categorical(Operation): input_spec = InputSpec(", "interval (exclusive). Defaults to 1.0. seed: const<i32>, optional Seed to create a reproducible", "standard deviation (width) of the normal distribution. Defaults to 1.0. seed: const<i32>, optional", "rights reserved. # # Use of this source code is governed by a", "shape=IntTensorInputType(), prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_bernoulli,", "output shape filled with random values. See Also -------- random_bernoulli, random_normal, random_uniform \"\"\"", "Defaults to 1.0. seed: const<i32>, optional Seed to create a reproducible sequence of", "> 0 for k = 0,..., K-1. low: const<f32>, optional Lower boundary of", "* \"\"\" Random Op Superclass \"\"\" class RandomDistribution(Operation): input_spec = InputSpec(shape=IntTensorInputType(),) def __init__(self,", "Bernoulli distribution. .. math:: f(k) = \\begin{cases}1-p &\\text{if } k = 0\\\\ p", "output shape filled with random values. See Also -------- random_categorical, random_bernoulli, random_normal \"\"\"", "'logits'. size: const<i32>, optional Number of samples to draw. Defaults to 1. seed:", "random_uniform \"\"\" ) class random_categorical(Operation): input_spec = InputSpec( x=TensorInputType(), mode=StringInputType(const=True, default=\"logits\"), size=IntInputType(const=True, default=1),", "values from a categorical distribution. 
Parameters ---------- shape: <*D_in, T> N-dimensional tensor, one", "length here. if self.shape.sym_val is None: shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])])", ".. math:: f(k) = \\begin{cases}1-p &\\text{if } k = 0\\\\ p &\\text{if }", "\"\"\" ) class random_uniform(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), low=FloatInputType(const=True, default=0.0), high=FloatInputType(const=True, default=1.0),", "__init__(self, **kwargs): super(random_bernoulli, self).__init__(**kwargs) @register_op( doc_str=r\"\"\" Returns random values from a categorical distribution.", "with random values. See Also -------- random_categorical, random_normal, random_uniform \"\"\" ) class random_bernoulli(RandomDistribution):", "Op Implementation(s) \"\"\" @register_op( doc_str=r\"\"\" Returns a tensor with specified shape with random", "across multiple invokes. Returns ------- <*D_in[:-1] + [size], T>, a tensor of given", "from coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol from ._op_reqs import * \"\"\" Random Op Superclass", "of values across multiple invokes. Returns ------- <*, T>, a tensor of given", "0.0. high: const<f32>, optional Upper boundary of the output interval (exclusive). Defaults to", "specifies distributions, the last dimension represents a vector of probabilities. mode: const<str>, optional", "shape has variable length. return types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has fixed length here.", "categorical distribution. Parameters ---------- shape: <*D_in, T> N-dimensional tensor, one of logits (event" ]
[ "pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0,", "black/white mask from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*, GRAY*\", [],", "def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable,", "image from selection\", \"Create a black/white mask from the current selection\", \"<NAME>\", \"<NAME>\",", "drawable): # image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0,", "pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image", "drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image)", "gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0,", "= gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0,", "\"Create a black/white mask from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*,", "2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from selection\", \"Create", "0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0,", "change to binary pallete 
pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register(", "\"create_mask_from_selection\", \"Create mask image from selection\", \"Create a black/white mask from the current", "pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0,", "# image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0,", "0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0) # change to", "1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0,", "0, 0, 100.0, 255.0, 0, 0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image,", "= pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable,", "True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from selection\", \"Create a black/white", "pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from selection\", \"Create a black/white mask from", "pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0) # change to binary pallete", "import * def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image)", "binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create", "255.0, 0, 0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2,", "pallete pdb.gimp_convert_indexed(image, 
NO_DITHER, 0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask", "register( \"create_mask_from_selection\", \"Create mask image from selection\", \"Create a black/white mask from the", "a black/white mask from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*, GRAY*\",", "create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1,", "255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0)", "mask image from selection\", \"Create a black/white mask from the current selection\", \"<NAME>\",", "pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0,", "image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0,", "* def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image)", "0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True,", "100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0,", "0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0) # change", "0, 100.0, 255.0, 0, 0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER,", "# change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image)", 
"the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*, GRAY*\", [], [], create_mask_from_selection, )", "gimpfu import * def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable =", "0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from selection\",", "mask from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*, GRAY*\", [], [],", "0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True, \"\")", "NO_DITHER, 0, 2, False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from", "# drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0)", "0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0) #", "#!/usr/bin/env python from gimpfu import * def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0]", "selection\", \"Create a black/white mask from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\",", "from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*, GRAY*\", [], [], create_mask_from_selection,", "pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0) # change to binary", "from gimpfu import * def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable", "python from gimpfu import * def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] #", "100.0, 255.0, 0, 0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0,", "to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, 
True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\",", "\"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from selection\", \"Create a black/white mask", "current selection\", \"<NAME>\", \"<NAME>\", \"2020\", \"<Image>/File/MaskImage\", \"RGB*, GRAY*\", [], [], create_mask_from_selection, ) main()", "from selection\", \"Create a black/white mask from the current selection\", \"<NAME>\", \"<NAME>\", \"2020\",", "0, 0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False,", "\"Create mask image from selection\", \"Create a black/white mask from the current selection\",", "False, True, \"\") pdb.gimp_image_undo_group_end(image) register( \"create_mask_from_selection\", \"Create mask image from selection\", \"Create a" ]
[ "here. :return: The expected output. \"\"\" if c > w: return 0 leading_ps", "> w: return 0 leading_ps = w // p tables = 0 zeroes_available", "p chips_used = number_of_zeroes_filled * c # first chip drawn from the non-zero", "list(map(int, input().split(' '))) result = solve(c, n, p, w) print(result) return result def", "<reponame>aalekhpatel07/IEEExtreme14 def solve(c, n, p, w): \"\"\" Solve the problem here. :return: The", "first chip drawn from the non-zero entry if exists. chips_used -= (w %", "chips_used // p # now assume no zeroes available already. return tables def", "< zeroes_available: return tables zeroes_available = 1 + chips_used // p # now", "of computation. \"\"\" c, n, p, w = list(map(int, input().split(' '))) result =", "= n - (leading_ps + 1) if w % p == 0: zeroes_available", "n, p, w): \"\"\" Solve the problem here. :return: The expected output. \"\"\"", "sure this driver returns the result. :return: result - Result of computation. \"\"\"", "no zeroes available already. return tables def driver(): \"\"\" Make sure this driver", "p) w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available:", "w % p chips_used = number_of_zeroes_filled * c # first chip drawn from", "chip drawn from the non-zero entry if exists. chips_used -= (w % p)", "Solve the problem here. :return: The expected output. \"\"\" if c > w:", "% p) w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled <", "// p tables = 0 zeroes_available = n - (leading_ps + 1) if", "+= 1 while zeroes_available > 0: number_of_zeroes_filled = min(w // c, zeroes_available) #", "# first chip drawn from the non-zero entry if exists. chips_used -= (w", "- (leading_ps + 1) if w % p == 0: zeroes_available += 1", "c, zeroes_available) # w % p chips_used = number_of_zeroes_filled * c # first", "\"\"\" Solve the problem here. :return: The expected output. 
\"\"\" if c >", "leading_ps = w // p tables = 0 zeroes_available = n - (leading_ps", "1 + chips_used // p # now assume no zeroes available already. return", "zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled = min(w // c, zeroes_available)", "w) print(result) return result def main(): return driver() if __name__ == '__main__': main()", "drawn from the non-zero entry if exists. chips_used -= (w % p) w", "n - (leading_ps + 1) if w % p == 0: zeroes_available +=", ":return: The expected output. \"\"\" if c > w: return 0 leading_ps =", "number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available", "p, w): \"\"\" Solve the problem here. :return: The expected output. \"\"\" if", "already. return tables def driver(): \"\"\" Make sure this driver returns the result.", "w): \"\"\" Solve the problem here. :return: The expected output. \"\"\" if c", "= w // p tables = 0 zeroes_available = n - (leading_ps +", "w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return", "Result of computation. \"\"\" c, n, p, w = list(map(int, input().split(' '))) result", "while zeroes_available > 0: number_of_zeroes_filled = min(w // c, zeroes_available) # w %", "The expected output. \"\"\" if c > w: return 0 leading_ps = w", "\"\"\" if c > w: return 0 leading_ps = w // p tables", "w: return 0 leading_ps = w // p tables = 0 zeroes_available =", "if exists. chips_used -= (w % p) w -= number_of_zeroes_filled * c tables", "0 leading_ps = w // p tables = 0 zeroes_available = n -", "w % p == 0: zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled", "result - Result of computation. 
\"\"\" c, n, p, w = list(map(int, input().split('", "== 0: zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled = min(w //", "(w % p) w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled", "w = list(map(int, input().split(' '))) result = solve(c, n, p, w) print(result) return", "= 1 + chips_used // p # now assume no zeroes available already.", "def solve(c, n, p, w): \"\"\" Solve the problem here. :return: The expected", "chips_used -= (w % p) w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled", "= list(map(int, input().split(' '))) result = solve(c, n, p, w) print(result) return result", "from the non-zero entry if exists. chips_used -= (w % p) w -=", "available already. return tables def driver(): \"\"\" Make sure this driver returns the", "zeroes_available) # w % p chips_used = number_of_zeroes_filled * c # first chip", "return tables zeroes_available = 1 + chips_used // p # now assume no", "zeroes_available = 1 + chips_used // p # now assume no zeroes available", "this driver returns the result. :return: result - Result of computation. \"\"\" c,", "# now assume no zeroes available already. return tables def driver(): \"\"\" Make", "c, n, p, w = list(map(int, input().split(' '))) result = solve(c, n, p,", "0 zeroes_available = n - (leading_ps + 1) if w % p ==", "zeroes_available: return tables zeroes_available = 1 + chips_used // p # now assume", "* c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available =", "number_of_zeroes_filled * c # first chip drawn from the non-zero entry if exists.", "now assume no zeroes available already. return tables def driver(): \"\"\" Make sure", "= solve(c, n, p, w) print(result) return result def main(): return driver() if", "\"\"\" Make sure this driver returns the result. :return: result - Result of", "assume no zeroes available already. 
return tables def driver(): \"\"\" Make sure this", "Make sure this driver returns the result. :return: result - Result of computation.", "def driver(): \"\"\" Make sure this driver returns the result. :return: result -", "p == 0: zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled = min(w", "driver(): \"\"\" Make sure this driver returns the result. :return: result - Result", "'))) result = solve(c, n, p, w) print(result) return result def main(): return", "zeroes_available > 0: number_of_zeroes_filled = min(w // c, zeroes_available) # w % p", "c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1", "// p # now assume no zeroes available already. return tables def driver():", "exists. chips_used -= (w % p) w -= number_of_zeroes_filled * c tables +=", "* c # first chip drawn from the non-zero entry if exists. chips_used", "+ chips_used // p # now assume no zeroes available already. return tables", "% p chips_used = number_of_zeroes_filled * c # first chip drawn from the", "- Result of computation. \"\"\" c, n, p, w = list(map(int, input().split(' ')))", "-= (w % p) w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled if", "problem here. :return: The expected output. \"\"\" if c > w: return 0", "tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1 +", "p tables = 0 zeroes_available = n - (leading_ps + 1) if w", "expected output. \"\"\" if c > w: return 0 leading_ps = w //", "+ 1) if w % p == 0: zeroes_available += 1 while zeroes_available", "if number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1 + chips_used // p", "p, w = list(map(int, input().split(' '))) result = solve(c, n, p, w) print(result)", "driver returns the result. :return: result - Result of computation. \"\"\" c, n,", "return tables def driver(): \"\"\" Make sure this driver returns the result. 
:return:", "= min(w // c, zeroes_available) # w % p chips_used = number_of_zeroes_filled *", "w // p tables = 0 zeroes_available = n - (leading_ps + 1)", "if w % p == 0: zeroes_available += 1 while zeroes_available > 0:", "\"\"\" c, n, p, w = list(map(int, input().split(' '))) result = solve(c, n,", "% p == 0: zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled =", "return 0 leading_ps = w // p tables = 0 zeroes_available = n", "p # now assume no zeroes available already. return tables def driver(): \"\"\"", "if c > w: return 0 leading_ps = w // p tables =", "result = solve(c, n, p, w) print(result) return result def main(): return driver()", "chips_used = number_of_zeroes_filled * c # first chip drawn from the non-zero entry", "tables def driver(): \"\"\" Make sure this driver returns the result. :return: result", "c > w: return 0 leading_ps = w // p tables = 0", "0: zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled = min(w // c,", "solve(c, n, p, w) print(result) return result def main(): return driver() if __name__", "non-zero entry if exists. chips_used -= (w % p) w -= number_of_zeroes_filled *", "// c, zeroes_available) # w % p chips_used = number_of_zeroes_filled * c #", "n, p, w) print(result) return result def main(): return driver() if __name__ ==", "the problem here. :return: The expected output. \"\"\" if c > w: return", "min(w // c, zeroes_available) # w % p chips_used = number_of_zeroes_filled * c", "n, p, w = list(map(int, input().split(' '))) result = solve(c, n, p, w)", "zeroes_available = n - (leading_ps + 1) if w % p == 0:", "> 0: number_of_zeroes_filled = min(w // c, zeroes_available) # w % p chips_used", "number_of_zeroes_filled = min(w // c, zeroes_available) # w % p chips_used = number_of_zeroes_filled", ":return: result - Result of computation. 
\"\"\" c, n, p, w = list(map(int,", "-= number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables", "computation. \"\"\" c, n, p, w = list(map(int, input().split(' '))) result = solve(c,", "p, w) print(result) return result def main(): return driver() if __name__ == '__main__':", "= number_of_zeroes_filled * c # first chip drawn from the non-zero entry if", "the non-zero entry if exists. chips_used -= (w % p) w -= number_of_zeroes_filled", "returns the result. :return: result - Result of computation. \"\"\" c, n, p,", "(leading_ps + 1) if w % p == 0: zeroes_available += 1 while", "1) if w % p == 0: zeroes_available += 1 while zeroes_available >", "0: number_of_zeroes_filled = min(w // c, zeroes_available) # w % p chips_used =", "# w % p chips_used = number_of_zeroes_filled * c # first chip drawn", "tables = 0 zeroes_available = n - (leading_ps + 1) if w %", "1 while zeroes_available > 0: number_of_zeroes_filled = min(w // c, zeroes_available) # w", "+= number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1 + chips_used", "zeroes available already. return tables def driver(): \"\"\" Make sure this driver returns", "tables zeroes_available = 1 + chips_used // p # now assume no zeroes", "the result. :return: result - Result of computation. \"\"\" c, n, p, w", "input().split(' '))) result = solve(c, n, p, w) print(result) return result def main():", "solve(c, n, p, w): \"\"\" Solve the problem here. :return: The expected output.", "number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1 + chips_used // p #", "output. \"\"\" if c > w: return 0 leading_ps = w // p", "= 0 zeroes_available = n - (leading_ps + 1) if w % p", "entry if exists. chips_used -= (w % p) w -= number_of_zeroes_filled * c", "result. :return: result - Result of computation. \"\"\" c, n, p, w =", "c # first chip drawn from the non-zero entry if exists. 
chips_used -=", "number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1 + chips_used //" ]
[ "expected): res = Ranker(file, update_text) assert res == expected, \"Update failed\\ngot:\\n\" + str(res)", "Ranker(file, update_text) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" +", "+ \"\\nexpected:\\n\" + expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players': [{'name': 'youssef', 'rank': 0,", "+ str(res) + \"\\nexpected:\\n\" + expected def test_add(file, player, expected): res = add_player(file,", "expected): res = add_player(file, player) assert res == expected, \"Update failed\\ngot:\\n\" + str(res)", "from Ranking.src.Ranker import Ranker, add_player def test_update(file, update_text, expected): res = Ranker(file, update_text)", "test_update(file, update_text, expected): res = Ranker(file, update_text) assert res == expected, \"Update failed\\ngot:\\n\"", "\"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players':", "update_text) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected", "failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players': [{'name':", "+ str(res) + \"\\nexpected:\\n\" + expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players': [{'name': 'youssef',", "str(res) + \"\\nexpected:\\n\" + expected def test_add(file, player, expected): res = add_player(file, player)", "res = Ranker(file, update_text) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) +", "import Ranker, add_player def test_update(file, update_text, expected): res = Ranker(file, update_text) assert res", "= add_player(file, player) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\"", "expected def test_add(file, player, expected): res = add_player(file, player) assert res 
== expected,", "player) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected", "res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected def test_add(file,", "\"\\nexpected:\\n\" + expected def test_add(file, player, expected): res = add_player(file, player) assert res", "+ expected def test_add(file, player, expected): res = add_player(file, player) assert res ==", "test_add(file, player, expected): res = add_player(file, player) assert res == expected, \"Update failed\\ngot:\\n\"", "player, expected): res = add_player(file, player) assert res == expected, \"Update failed\\ngot:\\n\" +", "assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected def", "Ranking.src.Ranker import Ranker, add_player def test_update(file, update_text, expected): res = Ranker(file, update_text) assert", "def test_add(file, player, expected): res = add_player(file, player) assert res == expected, \"Update", "expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected if __name__ == \"__main__\":", "update_text, expected): res = Ranker(file, update_text) assert res == expected, \"Update failed\\ngot:\\n\" +", "expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected def test_add(file, player, expected):", "add_player(file, player) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" +", "def test_update(file, update_text, expected): res = Ranker(file, update_text) assert res == expected, \"Update", "Ranker, add_player def test_update(file, update_text, expected): res = Ranker(file, update_text) assert res ==", "add_player def test_update(file, update_text, expected): res = Ranker(file, update_text) assert res == expected,", "res = add_player(file, player) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) +", "res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" 
+ expected if __name__", "== expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected if __name__ ==", "failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected def test_add(file, player, expected): res =", "+ expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players': [{'name': 'youssef', 'rank': 0, 'points': 0}]}\")", "= Ranker(file, update_text) assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\"", "+ \"\\nexpected:\\n\" + expected def test_add(file, player, expected): res = add_player(file, player) assert", "== expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected def test_add(file, player,", "str(res) + \"\\nexpected:\\n\" + expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players': [{'name': 'youssef', 'rank':", "\"\\nexpected:\\n\" + expected if __name__ == \"__main__\": test_add(\"test_files/empty.json\",\"youssef\",\"{'players': [{'name': 'youssef', 'rank': 0, 'points':", "\"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected def test_add(file, player, expected): res", "assert res == expected, \"Update failed\\ngot:\\n\" + str(res) + \"\\nexpected:\\n\" + expected if" ]
[ "example, expected) = match.groups() result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), }", "table.strip().split(\"\\n\")] def main(path): result = [] text = path.read_text() for match in re.finditer(SECTION_PATTERN,", "from pathlib import Path import re import context from src.goodies import print_warning SECTION_PATTERN", "---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return [] return [tuple(row.split(\"|\")) for row", "than matches.') return result if __name__ == \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")),", "sections than matches.') return result if __name__ == \"__main__\": for (i, test_data) in", "pathlib import Path import re import context from src.goodies import print_warning SECTION_PATTERN =", "\"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if len(result) < len(re.findall(\"(?m)^### \",", "def main(path): result = [] text = path.read_text() for match in re.finditer(SECTION_PATTERN, text):", "path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return [] return [tuple(row.split(\"|\")) for", "original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return", "} ) if len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more sections than", "SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\| new name ---\\|--- ((?:.+\\n)*)", "extract_rows(example), \"expected\": extract_rows(expected), } ) if len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has", "import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\| new name", "if not table: return [] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def main(path):", "[] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def 
main(path): result = [] text", "in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups() result.append( { \"title\": title, \"example\":", "result if __name__ == \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] =", "result = [] text = path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title, example,", "test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section {#}: {title}\".format(**test_data)) print(\"Example: {example}\".format(**test_data)) print(\"Expected:", "#### Result\\n original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not", "src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\| new", "len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more sections than matches.') return result", "print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\| new name ---\\|---", "matches.') return result if __name__ == \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1):", "(i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section {#}: {title}\".format(**test_data)) print(\"Example: {example}\".format(**test_data))", "text): (title, example, expected) = match.groups() result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\":", "original path \\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n original path \\| new", "path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups() result.append( {", "import re import context from src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n ####", "name ---\\|--- ((?:.+\\n)*) #### Result\\n original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def", "text = path.read_text() for 
match in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups()", "match in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups() result.append( { \"title\": title,", "r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n", "table: return [] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def main(path): result =", "if len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more sections than matches.') return", "= match.groups() result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if", "print_warning('\"examples.md\" has more sections than matches.') return result if __name__ == \"__main__\": for", "if __name__ == \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i", "((?:.+\\n)*) #### Result\\n original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if", "not table: return [] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def main(path): result", "[] text = path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title, example, expected) =", "{ \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if len(result) < len(re.findall(\"(?m)^###", "path \\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n original path \\| new path", "extract_rows(expected), } ) if len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more sections", "== \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section {#}:", "re import context from src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n", "title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if 
len(result) < len(re.findall(\"(?m)^### \", text)):", "has more sections than matches.') return result if __name__ == \"__main__\": for (i,", "return result if __name__ == \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"]", "more sections than matches.') return result if __name__ == \"__main__\": for (i, test_data)", "\"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\"", "\\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return [] return", "Result\\n original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table:", "len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more sections than matches.') return result if __name__", "__name__ == \"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section", "---\\|--- ((?:.+\\n)*) #### Result\\n original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table):", "#### Example\\n original path \\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n original path", "context from src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n original path", "((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return [] return [tuple(row.split(\"|\")) for row in", "Example\\n original path \\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n original path \\|", "return [] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def main(path): result = []", "def extract_rows(table): if not table: return [] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")]", "for match in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups() result.append( { \"title\":", "import context from src.goodies import print_warning SECTION_PATTERN 
= r\"\"\"(?m)^### (.+)\\n #### Example\\n original", "[tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def main(path): result = [] text = path.read_text()", "import Path import re import context from src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^###", "\\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n original path \\| new path ---\\|---", "Path import re import context from src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n", "from src.goodies import print_warning SECTION_PATTERN = r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\|", "< len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more sections than matches.') return result if", "\"__main__\": for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section {#}: {title}\".format(**test_data))", "expected) = match.groups() result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } )", "= [] text = path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title, example, expected)", "return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def main(path): result = [] text =", "new name ---\\|--- ((?:.+\\n)*) #### Result\\n original path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\"", "for row in table.strip().split(\"\\n\")] def main(path): result = [] text = path.read_text() for", "new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return [] return [tuple(row.split(\"|\"))", "\"expected\": extract_rows(expected), } ) if len(result) < len(re.findall(\"(?m)^### \", text)): print_warning('\"examples.md\" has more", "in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section {#}: {title}\".format(**test_data)) print(\"Example: {example}\".format(**test_data)) print(\"Expected: {expected}\".format(**test_data))", "re.finditer(SECTION_PATTERN, text): (title, 
example, expected) = match.groups() result.append( { \"title\": title, \"example\": extract_rows(example),", "(title, example, expected) = match.groups() result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected),", "for (i, test_data) in enumerate(main(Path(\"test/examples.md\")), 1): test_data[\"#\"] = i print(\"Section {#}: {title}\".format(**test_data)) print(\"Example:", "= path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups() result.append(", "= r\"\"\"(?m)^### (.+)\\n #### Example\\n original path \\| new name ---\\|--- ((?:.+\\n)*) ####", "match.groups() result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if len(result)", "row in table.strip().split(\"\\n\")] def main(path): result = [] text = path.read_text() for match", "path \\| new path ---\\|--- ((?:.+\\n)*)\"\"\" def extract_rows(table): if not table: return []", "(.+)\\n #### Example\\n original path \\| new name ---\\|--- ((?:.+\\n)*) #### Result\\n original", "\", text)): print_warning('\"examples.md\" has more sections than matches.') return result if __name__ ==", "result.append( { \"title\": title, \"example\": extract_rows(example), \"expected\": extract_rows(expected), } ) if len(result) <", "<reponame>poponealex/files_renamer from pathlib import Path import re import context from src.goodies import print_warning", "text)): print_warning('\"examples.md\" has more sections than matches.') return result if __name__ == \"__main__\":", "extract_rows(table): if not table: return [] return [tuple(row.split(\"|\")) for row in table.strip().split(\"\\n\")] def", "main(path): result = [] text = path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title,", "in table.strip().split(\"\\n\")] def main(path): result = [] text = path.read_text() for match in", ") if len(result) < len(re.findall(\"(?m)^### \", text)): 
print_warning('\"examples.md\" has more sections than matches.')" ]
[ "= self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test get or", "\"foo\" self.email = \"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url", "from django.conf import settings from django.test import TestCase from django.utils.http import urlencode from", "django.utils.http import urlencode from django.contrib.auth.models import User from django.urls import reverse from indieweb.models", "range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test", "self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on", "\"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self):", "= \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are redirected to login if", "def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on new authentication. \"\"\" self.client.login(username=self.username, password=<PASSWORD>)", "------------ Tests for `django-indieweb` auth endpoint. 
\"\"\" import pytz from datetime import datetime", "\"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self):", "= \"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\")", "self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test get or create logic for Auth", "import User from django.urls import reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def", "setUp(self): self.username = \"foo\" self.email = \"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username,", "Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout +", "reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\":", "in. \"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\"", "1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we", "\"\"\" Assure get without proper parameters raises an error. 
\"\"\" self.client.login(username=self.username, password=self.password) response", "to get an auth-code from the indieweb auth endpoint and are not yet", "class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\" self.email = \"<EMAIL>\" self.password = \"password\"", "\"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url =", "we try to get an auth-code from the indieweb auth endpoint and are", "self.password) self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\",", "indieweb auth endpoint and are not yet logged in. \"\"\" response = self.client.get(self.endpoint_url)", "\"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url)", "self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back an auth", "if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in", "response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings,", "\"\"\"Assure we get back an auth code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>)", "utf-8 -*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint. 
\"\"\" import pytz", "60) auth.created = auth.created - timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url) auth", "logged in. \"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self):", "auth endpoint and are not yet logged in. \"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code,", "in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on new authentication. \"\"\"", "urllib.parse import parse_qs from urllib.parse import urlparse from django.conf import settings from django.test", "self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test get or create logic", "= { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", }", "\"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are redirected", "response.url) def test_authenticated_without_params(self): \"\"\" Assure get without proper parameters raises an error. \"\"\"", "self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\"", "we are redirected to login if we try to get an auth-code from", "is resetted on new authentication. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data =", "self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on new authentication.", "password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout =", "datetime from datetime import timedelta from urllib.parse import parse_qs from urllib.parse import urlparse", "import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\" self.email = \"<EMAIL>\" self.password", "Assure get without proper parameters raises an error. \"\"\" self.client.login(username=self.username, password=self.password) response =", "password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test", "self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\",", "self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout", "import urlparse from django.conf import settings from django.test import TestCase from django.utils.http import", "404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back an auth code", "\"\"\" Test get or create logic for Auth object. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) for", "from django.urls import reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username", "timedelta from urllib.parse import parse_qs from urllib.parse import urlparse from django.conf import settings", "\"\"\" import pytz from datetime import datetime from datetime import timedelta from urllib.parse", "self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in", "response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout", "we get back an auth code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response", "\"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\"", "Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code,", "or create logic for Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2):", "Tests for `django-indieweb` auth endpoint. 
\"\"\" import pytz from datetime import datetime from", "reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\" self.email", "self.password = \"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params =", "authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self):", "coding: utf-8 -*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint. \"\"\" import", "self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted", "self.username = \"foo\" self.email = \"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username, self.email,", "me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout + 10)", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb`", "datetime import timedelta from urllib.parse import parse_qs from urllib.parse import urlparse from django.conf", "without proper parameters raises an error. \"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code,", "object. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302)", "redirected to login if we try to get an auth-code from the indieweb", "password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we", "import urlencode from django.contrib.auth.models import User from django.urls import reverse from indieweb.models import", "for `django-indieweb` auth endpoint. \"\"\" import pytz from datetime import datetime from datetime", "def test_not_authenticated(self): \"\"\" Assure we are redirected to login if we try to", "self.email = \"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url =", "proper parameters raises an error. \"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404)", "self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back an", "python # -*- coding: utf-8 -*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth", "urlparse from django.conf import settings from django.test import TestCase from django.utils.http import urlencode", "parameters raises an error. \"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\"", "resetted on new authentication. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query)", "endpoint and are not yet logged in. \"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302)", "\"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint. \"\"\" import pytz from datetime", "import settings from django.test import TestCase from django.utils.http import urlencode from django.contrib.auth.models import", "def test_get_or_create(self): \"\"\" Test get or create logic for Auth object. \"\"\" self.client.login(username=self.username,", "from datetime import timedelta from urllib.parse import parse_qs from urllib.parse import urlparse from", "django.contrib.auth.models import User from django.urls import reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase):", "from urllib.parse import parse_qs from urllib.parse import urlparse from django.conf import settings from", "TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\" self.email = \"<EMAIL>\" self.password = \"password\" self.user", "= self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is", "to login if we try to get an auth-code from the indieweb auth", "parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created", "= getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout + 10) auth.save() response", "self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure get without proper", "= 
self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure get without", "test_authenticated(self): \"\"\"Assure we get back an auth code if we are authenticated.\"\"\" self.client.login(username=self.username,", "\"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure", "getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout + 10) auth.save() response =", "auth code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302)", "import parse_qs from urllib.parse import urlparse from django.conf import settings from django.test import", "on new authentication. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth", "auth-code from the indieweb auth endpoint and are not yet logged in. \"\"\"", "if we try to get an auth-code from the indieweb auth endpoint and", "get back an auth code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response =", "error. 
\"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def", "settings from django.test import TestCase from django.utils.http import urlencode from django.contrib.auth.models import User", "urlencode from django.contrib.auth.models import User from django.urls import reverse from indieweb.models import Auth", "= parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created =", "in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\"", "= self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\",", "\"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure", "= User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\":", "and are not yet logged in. \"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\"", "response.url) def test_get_or_create(self): \"\"\" Test get or create logic for Auth object. \"\"\"", "timeout is resetted on new authentication. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data", "auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created -", "# -*- coding: utf-8 -*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint.", "-*- coding: utf-8 -*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint. \"\"\"", "pytz from datetime import datetime from datetime import timedelta from urllib.parse import parse_qs", "from the indieweb auth endpoint and are not yet logged in. \"\"\" response", "code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\"", "auth.created - timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0])", "302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test get or create logic for", "10) auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) self.assertTrue((datetime.now(pytz.utc) - auth.created).seconds <=", "response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test get", "} self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are redirected to", "\"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\"", "are not yet logged in. 
\"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in", "\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0])", "= reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890,", "self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60)", "get or create logic for Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in", "an auth code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code,", "from django.test import TestCase from django.utils.http import urlencode from django.contrib.auth.models import User from", "\"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are redirected to login if we", "get without proper parameters raises an error. 
\"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url)", "get an auth-code from the indieweb auth endpoint and are not yet logged", "parse_qs from urllib.parse import urlparse from django.conf import settings from django.test import TestCase", "test_not_authenticated(self): \"\"\" Assure we are redirected to login if we try to get", "in response.url) def test_get_or_create(self): \"\"\" Test get or create logic for Auth object.", "try to get an auth-code from the indieweb auth endpoint and are not", "the indieweb auth endpoint and are not yet logged in. \"\"\" response =", "auth endpoint. \"\"\" import pytz from datetime import datetime from datetime import timedelta", "auth.created = auth.created - timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url) auth =", "def setUp(self): self.username = \"foo\" self.email = \"<EMAIL>\" self.password = \"password\" self.user =", "User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\",", "{ \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url", "urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are redirected to login if we try", "\"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params", "302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure get without proper parameters raises", "for i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def", "datetime 
import datetime from datetime import timedelta from urllib.parse import parse_qs from urllib.parse", "self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure", "= \"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params = {", "yet logged in. \"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def", "import datetime from datetime import timedelta from urllib.parse import parse_qs from urllib.parse import", "an error. \"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\"))", "we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url)", "-*- \"\"\" test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint. \"\"\" import pytz from", "from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\" self.email =", "urllib.parse import urlparse from django.conf import settings from django.test import TestCase from django.utils.http", "not yet logged in. 
\"\"\" response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url)", "django.test import TestCase from django.utils.http import urlencode from django.contrib.auth.models import User from django.urls", "timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) self.assertTrue((datetime.now(pytz.utc) -", "indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\" self.email = \"<EMAIL>\"", "= self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back", "are redirected to login if we try to get an auth-code from the", "in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back an auth code if we", "data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created", "\"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are", "url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\",", "+ 10) auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) self.assertTrue((datetime.now(pytz.utc) - auth.created).seconds", "an auth-code from the indieweb auth endpoint and are not yet logged in.", "authentication. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth = Auth.objects.get(owner=self.user,", "= Auth.objects.get(owner=self.user, me=data[\"me\"][0]) timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout", "from datetime import datetime from datetime import timedelta from urllib.parse import parse_qs from", "302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on new", "\"\"\" Assure we are redirected to login if we try to get an", "endpoint. \"\"\" import pytz from datetime import datetime from datetime import timedelta from", "`django-indieweb` auth endpoint. \"\"\" import pytz from datetime import datetime from datetime import", "\"password\" self.user = User.objects.create_user(self.username, self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\":", "import pytz from datetime import datetime from datetime import timedelta from urllib.parse import", "test_get_or_create(self): \"\"\" Test get or create logic for Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>)", "test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on new authentication. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response", "self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure get without proper parameters", "= auth.created - timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user,", "\"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url,", "response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back an auth code if we are", "from django.utils.http import urlencode from django.contrib.auth.models import User from django.urls import reverse from", "self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_get_or_create(self): \"\"\" Test get or create", "response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure get", "import reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username = \"foo\"", "TestCase from django.utils.http import urlencode from django.contrib.auth.models import User from django.urls import reverse", "def test_authenticated_without_params(self): \"\"\" Assure get without proper parameters raises an error. \"\"\" self.client.login(username=self.username,", "import timedelta from urllib.parse import parse_qs from urllib.parse import urlparse from django.conf import", "from django.contrib.auth.models import User from django.urls import reverse from indieweb.models import Auth class", "new authentication. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) data = parse_qs(urlparse(response.url).query) auth =", "Assure we are redirected to login if we try to get an auth-code", "= \"foo\" self.email = \"<EMAIL>\" self.password = \"password\" self.user = User.objects.create_user(self.username, self.email, self.password)", "\"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params))", "Test get or create logic for Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i", "self.assertTrue(\"login\" in response.url) def test_authenticated_without_params(self): \"\"\" Assure get without proper parameters raises an", "logic for Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response =", "- timedelta(seconds=timeout + 10) auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) self.assertTrue((datetime.now(pytz.utc)", "test_authenticated_without_params(self): \"\"\" Assure get without proper parameters raises an error. \"\"\" self.client.login(username=self.username, password=self.password)", "response.url) def test_auth_timeout_reset(self): \"\"\" Test timeout is resetted on new authentication. \"\"\" self.client.login(username=self.username,", "\"\"\" Test timeout is resetted on new authentication. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) response =", "auth.save() response = self.client.get(self.endpoint_url) auth = Auth.objects.get(owner=self.user, me=data[\"me\"][0]) self.assertTrue((datetime.now(pytz.utc) - auth.created).seconds <= timeout)", "create logic for Auth object. 
\"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response", "password=<PASSWORD>) for i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url)", "self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def test_not_authenticated(self): \"\"\" Assure we are redirected to login", "for Auth object. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) for i in range(2): response = self.client.get(self.endpoint_url)", "response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get", "User from django.urls import reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self):", "back an auth code if we are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url)", "timeout = getattr(settings, \"INDIWEB_AUTH_CODE_TIMEOUT\", 60) auth.created = auth.created - timedelta(seconds=timeout + 10) auth.save()", "self.assertTrue(\"missing\" in response.content.decode(\"utf-8\")) def test_authenticated(self): \"\"\"Assure we get back an auth code if", "self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\":", "from urllib.parse import urlparse from django.conf import settings from django.test import TestCase from", "def test_authenticated(self): \"\"\"Assure we get back an auth code if we are authenticated.\"\"\"", "django.conf import settings from django.test import TestCase from django.utils.http import urlencode from django.contrib.auth.models", "Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): 
self.username = \"foo\" self.email = \"<EMAIL>\" self.password =", "\"https://webapp.example.org\", \"redirect_uri\": \"https://webapp.example.org/auth/callback\", \"state\": 1234567890, \"scope\": \"post\", } self.endpoint_url = \"{}?{}\".format(self.base_url, urlencode(url_params)) def", "i in range(2): response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def test_auth_timeout_reset(self):", "login if we try to get an auth-code from the indieweb auth endpoint", "in response.url) def test_authenticated_without_params(self): \"\"\" Assure get without proper parameters raises an error.", "are authenticated.\"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url) self.assertEqual(response.status_code, 302) self.assertTrue(\"code\" in response.url) def", "Test timeout is resetted on new authentication. \"\"\" self.client.login(username=self.username, password=<PASSWORD>) response = self.client.get(self.endpoint_url)", "raises an error. \"\"\" self.client.login(username=self.username, password=self.password) response = self.client.get(self.base_url) self.assertEqual(response.status_code, 404) self.assertTrue(\"missing\" in", "self.email, self.password) self.base_url = reverse(\"indieweb:auth\") url_params = { \"me\": \"http://example.org\", \"client_id\": \"https://webapp.example.org\", \"redirect_uri\":", "django.urls import reverse from indieweb.models import Auth class TestIndiewebAuthEndpoint(TestCase): def setUp(self): self.username =", "test_django-indieweb ------------ Tests for `django-indieweb` auth endpoint. \"\"\" import pytz from datetime import", "import TestCase from django.utils.http import urlencode from django.contrib.auth.models import User from django.urls import" ]
[ "self.ax if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8)", "self.time_window = time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over time')", "_update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities", "ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i]", "= marxbot self.marxbot_max_vel = 30 self.time_window = time_window def _show(self, env): self.ax =", "= self.ax if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i])", "def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window,", "self.marxbot_max_vel = 30 self.time_window = time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control", "- 0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self):", "[] for i in range(self.n_dims): ax = self.ax if i > 0: ax", "= np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity", "numpy as np from kinematics import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz):", "_show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0)", "import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel", "i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) 
ax.tick_params(labelsize=8) plot =", "self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window = time_window def _show(self, env): self.ax", "def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] =", "marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window = time_window def", "self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"]", "= [+self.marxbot_max_vel, +10] self.plots = [] for i in range(self.n_dims): ax = self.ax", "to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot", "* abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities =", ") self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:,", "from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot =", "__init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window = time_window", "+ 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings =", "0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities", "0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\", \"angular", "color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 
0.1", "+10] self.plots = [] for i in range(self.n_dims): ax = self.ax if i", "range(self.n_dims): ax = self.ax if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i])", "* abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1,", "[+self.marxbot_max_vel, +10] self.plots = [] for i in range(self.n_dims): ax = self.ax if", "time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\")", "= [] for i in range(self.n_dims): ax = self.ax if i > 0:", "super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window = time_window def _show(self, env):", "kinematics import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10):", "time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window /", "abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds)", "[s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time", "env): self.ax = env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True)", "np from kinematics import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self,", "self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear", "np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors =", "self.ax.set_xlabel(\"time 
[s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval)", "= np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities for i in range(self.n_dims): self.plots[i].set_ydata(self.readings[i])", "from kinematics import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot,", "= ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim(", "velocity [cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10]", "/ env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels", "plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] +", "self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples)", "ax = self.ax if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y',", "round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan)", "maxs = [+self.marxbot_max_vel, +10] self.plots = [] for i in range(self.n_dims): ax =", "ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot)", "i in range(self.n_dims): ax = self.ax if i > 0: ax = ax.twinx()", "0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window,", "= time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over time') 
self.ax.set_xlabel(\"time", "import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__()", "[-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots = [] for i in range(self.n_dims):", "self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples", "self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\", \"angular velocity", "= [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots = [] for i in", "= 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings", "0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings,", "self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0,", "velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel,", "0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i],", "robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities for", "[rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10]", "np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\",", "self.ax = env.get_axes() self.ax.set_title('Control signals 
over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims", "over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window", "= ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1", "self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\",", "to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities for i in", "in range(self.n_dims): ax = self.ax if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i],", "ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0]", "\"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots = [] for", "import numpy as np from kinematics import to_robot_velocities from viz.env import Viz class", "viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot", "\"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs =", "maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings", "[\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel,", "Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel =", "class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot 
self.marxbot_max_vel = 30", "ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window", "[\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots = []", "= 30 self.time_window = time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals", "self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims,", "def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window =", "mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def", "mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots = [] for i", "abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1)", "env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels =", "marxbot self.marxbot_max_vel = 30 self.time_window = time_window def _show(self, env): self.ax = env.get_axes()", "= [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots =", "ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 *", "for i in range(self.n_dims): ax = self.ax if i > 0: ax =", "ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] -", "ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) 
ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i]", "2 self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings =", "env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2", "colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots", "30 self.time_window = time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over", "np.nan) labels = [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"]", "self.plots = [] for i in range(self.n_dims): ax = self.ax if i >", "self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i])", "= np.full((self.n_dims, self.n_samples), np.nan) labels = [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors", "[cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins = [-self.marxbot_max_vel, -10] maxs", "labels = [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins", "ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1 *", "> 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time,", "color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) )", "time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window = time_window def _show(self,", "signals over time') self.ax.set_xlabel(\"time [s]\") 
self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples =", "= [\"linear velocity [cm/s]\", \"angular velocity [rad/s]\"] colors = [\"tab:blue\", \"tab:orange\"] mins =", "if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot", "self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities for i in range(self.n_dims):", "self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1]", "= env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel(\"time [s]\") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims =", "= to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities for i", "-10] maxs = [+self.marxbot_max_vel, +10] self.plots = [] for i in range(self.n_dims): ax", "as np from kinematics import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def", "labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]),", "self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time =", "= round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples)," ]
[ "없고, 이미 역할이 지정된 keyword는 변수 이름으로 사용할 수 없다. ''' for a", "(r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in range(1, 11): print(centimeter,", "r in (0.1, 3, 15, 100): print('r =', r) print('PI * (r **", "(1, 3, 10, -2): print('a =', a) print('a × 3 + 1 =',", "값을 다시 대입하지 않는다. ''' PI = 3.14159265 for r in (0.1, 3,", "숫자나 '_' 이외의 특수문자로 시작할 수 없고, 이미 역할이 지정된 keyword는 변수 이름으로", "''' Python에서는 변수를 선언하여 사용할 수 있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수", "이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로 시작할 수 없고, 이미 역할이 지정된", "-*- # 변수 ''' Python에서는 변수를 선언하여 사용할 수 있다. 변수 이름은 문자나", "않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여, 처음 대입된 값 이외의", "1 =', a * 3 + 1, '\\n') # 상수 ''' Python에서 변수와", "INCH_TO_CENTIMETER = 2.54 for centimeter in range(1, 11): print(centimeter, 'cm ==',\\ centimeter *", "수 있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로", "+ 1, '\\n') # 상수 ''' Python에서 변수와 같은 이름을 갖는 상수를 선언하는", "keyword는 변수 이름으로 사용할 수 없다. ''' for a in (1, 3, 10,", "대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여, 처음 대입된 값 이외의 값을 다시", "대입된 값 이외의 값을 다시 대입하지 않는다. ''' PI = 3.14159265 for r", "'\\n') # 상수 ''' Python에서 변수와 같은 이름을 갖는 상수를 선언하는 방법은 기본", "for r in (0.1, 3, 15, 100): print('r =', r) print('PI * (r", "3 + 1, '\\n') # 상수 ''' Python에서 변수와 같은 이름을 갖는 상수를", "변수 이름으로 사용할 수 없다. ''' for a in (1, 3, 10, -2):", "수 없다. ''' for a in (1, 3, 10, -2): print('a =', a)", "시작할 수 없고, 이미 역할이 지정된 keyword는 변수 이름으로 사용할 수 없다. '''", "print('a × 3 + 1 =', a * 3 + 1, '\\n') #", "in (0.1, 3, 15, 100): print('r =', r) print('PI * (r ** 2)", "** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in range(1, 11): print(centimeter, 'cm", "for a in (1, 3, 10, -2): print('a =', a) print('a × 3", "방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여,", "상수 ''' Python에서 변수와 같은 이름을 갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지", "print('a =', a) print('a × 3 + 1 =', a * 3 +", "PI * (r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in range(1,", "변수 ''' Python에서는 변수를 선언하여 사용할 수 있다. 
변수 이름은 문자나 숫자로 이루어지는데,", "=', PI * (r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in", "다시 대입하지 않는다. ''' PI = 3.14159265 for r in (0.1, 3, 15,", "a * 3 + 1, '\\n') # 상수 ''' Python에서 변수와 같은 이름을", "이루어진 변수는 상수로 취급하여, 처음 대입된 값 이외의 값을 다시 대입하지 않는다. '''", "1, '\\n') # 상수 ''' Python에서 변수와 같은 이름을 갖는 상수를 선언하는 방법은", "변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로 시작할 수", "상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는", "= 2.54 for centimeter in range(1, 11): print(centimeter, 'cm ==',\\ centimeter * INCH_TO_CENTIMETER,", "역할이 지정된 keyword는 변수 이름으로 사용할 수 없다. ''' for a in (1,", "사용할 수 없다. ''' for a in (1, 3, 10, -2): print('a =',", "print('r =', r) print('PI * (r ** 2) =', PI * (r **", "10, -2): print('a =', a) print('a × 3 + 1 =', a *", "-2): print('a =', a) print('a × 3 + 1 =', a * 3", "관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여, 처음 대입된 값 이외의 값을", "python3 # -*- coding: utf-8 -*- # 변수 ''' Python에서는 변수를 선언하여 사용할", "3, 10, -2): print('a =', a) print('a × 3 + 1 =', a", "3, 15, 100): print('r =', r) print('PI * (r ** 2) =', PI", "이미 역할이 지정된 keyword는 변수 이름으로 사용할 수 없다. ''' for a in", "# 상수 ''' Python에서 변수와 같은 이름을 갖는 상수를 선언하는 방법은 기본 문법으로는", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- # 변수 ''' Python에서는 변수를 선언하여", "2) =', PI * (r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter", "# 변수 ''' Python에서는 변수를 선언하여 사용할 수 있다. 변수 이름은 문자나 숫자로", "변수는 상수로 취급하여, 처음 대입된 값 이외의 값을 다시 대입하지 않는다. ''' PI", "''' for a in (1, 3, 10, -2): print('a =', a) print('a ×", "Python에서는 변수를 선언하여 사용할 수 있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은", "Python에서 변수와 같은 이름을 갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로", "문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로 시작할 수 없고, 이미", "=', a * 3 + 1, '\\n') # 상수 ''' Python에서 변수와 같은", "값 이외의 값을 다시 대입하지 않는다. ''' PI = 3.14159265 for r in", "이외의 값을 다시 대입하지 않는다. 
''' PI = 3.14159265 for r in (0.1,", "* (r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in range(1, 11):", "'\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in range(1, 11): print(centimeter, 'cm ==',\\ centimeter", "특수문자로 시작할 수 없고, 이미 역할이 지정된 keyword는 변수 이름으로 사용할 수 없다.", "않는다. ''' PI = 3.14159265 for r in (0.1, 3, 15, 100): print('r", "문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여, 처음 대입된", "처음 대입된 값 이외의 값을 다시 대입하지 않는다. ''' PI = 3.14159265 for", "100): print('r =', r) print('PI * (r ** 2) =', PI * (r", "<reponame>dhchoi82/mathematics-python #!/usr/bin/env python3 # -*- coding: utf-8 -*- # 변수 ''' Python에서는 변수를", "2.54 for centimeter in range(1, 11): print(centimeter, 'cm ==',\\ centimeter * INCH_TO_CENTIMETER, 'in\\n')", "수 없고, 이미 역할이 지정된 keyword는 변수 이름으로 사용할 수 없다. ''' for", "print('PI * (r ** 2) =', PI * (r ** 2), '\\n') INCH_TO_CENTIMETER", "이름은 숫자나 '_' 이외의 특수문자로 시작할 수 없고, 이미 역할이 지정된 keyword는 변수", "× 3 + 1 =', a * 3 + 1, '\\n') # 상수", "이름을 갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로", "''' PI = 3.14159265 for r in (0.1, 3, 15, 100): print('r =',", "3.14159265 for r in (0.1, 3, 15, 100): print('r =', r) print('PI *", "a in (1, 3, 10, -2): print('a =', a) print('a × 3 +", "사용할 수 있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의", "갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진", "15, 100): print('r =', r) print('PI * (r ** 2) =', PI *", "2), '\\n') INCH_TO_CENTIMETER = 2.54 for centimeter in range(1, 11): print(centimeter, 'cm ==',\\", "= 3.14159265 for r in (0.1, 3, 15, 100): print('r =', r) print('PI", "선언하여 사용할 수 있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_'", "3 + 1 =', a * 3 + 1, '\\n') # 상수 '''", "** 2) =', PI * (r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54 for", "coding: utf-8 -*- # 변수 ''' Python에서는 변수를 선언하여 사용할 수 있다. 변수", "이름으로 사용할 수 없다. ''' for a in (1, 3, 10, -2): print('a", "상수로 취급하여, 처음 대입된 값 이외의 값을 다시 대입하지 않는다. 
''' PI =", "+ 1 =', a * 3 + 1, '\\n') # 상수 ''' Python에서", "r) print('PI * (r ** 2) =', PI * (r ** 2), '\\n')", "변수와 같은 이름을 갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와", "'_' 이외의 특수문자로 시작할 수 없고, 이미 역할이 지정된 keyword는 변수 이름으로 사용할", "=', r) print('PI * (r ** 2) =', PI * (r ** 2),", "제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여, 처음 대입된 값", "변수 이름은 숫자나 '_' 이외의 특수문자로 시작할 수 없고, 이미 역할이 지정된 keyword는", "'_' 만으로 이루어진 변수는 상수로 취급하여, 처음 대입된 값 이외의 값을 다시 대입하지", "이외의 특수문자로 시작할 수 없고, 이미 역할이 지정된 keyword는 변수 이름으로 사용할 수", "선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로", "utf-8 -*- # 변수 ''' Python에서는 변수를 선언하여 사용할 수 있다. 변수 이름은", "# -*- coding: utf-8 -*- # 변수 ''' Python에서는 변수를 선언하여 사용할 수", "(0.1, 3, 15, 100): print('r =', r) print('PI * (r ** 2) =',", "없다. ''' for a in (1, 3, 10, -2): print('a =', a) print('a", "있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로 시작할", "* 3 + 1, '\\n') # 상수 ''' Python에서 변수와 같은 이름을 갖는", "만으로 이루어진 변수는 상수로 취급하여, 처음 대입된 값 이외의 값을 다시 대입하지 않는다.", "''' Python에서 변수와 같은 이름을 갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다.", "숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로 시작할 수 없고, 이미 역할이", "in (1, 3, 10, -2): print('a =', a) print('a × 3 + 1", "지정된 keyword는 변수 이름으로 사용할 수 없다. ''' for a in (1, 3,", "대입하지 않는다. ''' PI = 3.14159265 for r in (0.1, 3, 15, 100):", "(r ** 2) =', PI * (r ** 2), '\\n') INCH_TO_CENTIMETER = 2.54", "취급하여, 처음 대입된 값 이외의 값을 다시 대입하지 않는다. ''' PI = 3.14159265", "-*- coding: utf-8 -*- # 변수 ''' Python에서는 변수를 선언하여 사용할 수 있다.", "변수를 선언하여 사용할 수 있다. 변수 이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나", "a) print('a × 3 + 1 =', a * 3 + 1, '\\n')", "같은 이름을 갖는 상수를 선언하는 방법은 기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_'", "=', a) print('a × 3 + 1 =', a * 3 + 1,", "PI = 3.14159265 for r in (0.1, 3, 15, 100): print('r =', r)", "* (r ** 2) =', PI * (r ** 2), '\\n') INCH_TO_CENTIMETER =", "이름은 문자나 숫자로 이루어지는데, 변수 이름은 숫자나 '_' 이외의 특수문자로 시작할 수 없고,", "기본 문법으로는 제공되지 않는다. 관습적으로 대문자와 '_' 만으로 이루어진 변수는 상수로 취급하여, 처음" ]
[ "NONE = None INT32 = 'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE", "item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM =", "SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64 = 'int64' FLOAT = 'float'", "ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM = 'form' SIMPLE = 'simple'", "has_value(cls, value): return (any(value == item.value for item in cls)) class ParameterStyle(Enum): MATRIX", "COOKIE = 'cookie' @classmethod def has_value(cls, value): return (any(value == item.value for item", "value): return (any(value == item.value for item in cls)) class ParameterStyle(Enum): MATRIX =", "PATH = 'path' QUERY = 'query' HEADER = 'header' COOKIE = 'cookie' @classmethod", "<gh_stars>0 from enum import Enum class ParameterLocation(Enum): PATH = 'path' QUERY = 'query'", "cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM = 'form' SIMPLE", "SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING = 'string' BOOLEAN = 'boolean'", "'header' COOKIE = 'cookie' @classmethod def has_value(cls, value): return (any(value == item.value for", "= 'header' COOKIE = 'cookie' @classmethod def has_value(cls, value): return (any(value == item.value", "'boolean' ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum): NONE = None INT32", "class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM = 'form' SIMPLE =", "'path' QUERY = 'query' HEADER = 'header' COOKIE = 'cookie' @classmethod def has_value(cls,", "= 'array' OBJECT = 'object' class SchemaFormat(Enum): NONE = None INT32 = 'int32'", "'float' DOUBLE = 'double' BYTE = 'byte' BINARY = 'binary' DATE = 'date'", "'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum): NONE", "(any(value == item.value for item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL", "'int64' FLOAT = 'float' DOUBLE = 'double' BYTE = 'byte' BINARY = 'binary'", "= 'boolean' ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum): 
NONE = None", "OBJECT = 'object' class SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64 =", "ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum): NONE = None INT32 =", "'integer' NUMBER = 'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY = 'array'", "'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT = 'object'", "= 'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED =", "NUMBER = 'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT", "SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class", "MATRIX = 'matrix' LABEL = 'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED", "import Enum class ParameterLocation(Enum): PATH = 'path' QUERY = 'query' HEADER = 'header'", "None INT32 = 'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE = 'double'", "INT32 = 'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE = 'double' BYTE", "= None INT32 = 'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE =", "DOUBLE = 'double' BYTE = 'byte' BINARY = 'binary' DATE = 'date' DATETIME", "= 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer'", "LABEL = 'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED", "= 'float' DOUBLE = 'double' BYTE = 'byte' BINARY = 'binary' DATE =", "= 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT =", "STRING = 'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT = 'object' class", "= 'double' BYTE = 'byte' BINARY = 'binary' DATE = 'date' DATETIME =", "INTEGER = 'integer' NUMBER = 'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY", "'double' BYTE = 'byte' BINARY = 'binary' DATE = 'date' DATETIME = 'date-time'", "QUERY = 'query' HEADER = 'header' COOKIE = 'cookie' @classmethod def has_value(cls, value):", "class ParameterLocation(Enum): PATH = 'path' QUERY = 
'query' HEADER = 'header' COOKIE =", "= 'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT =", "'byte' BINARY = 'binary' DATE = 'date' DATETIME = 'date-time' PASSWORD = 'password'", "= 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum):", "DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING =", "@classmethod def has_value(cls, value): return (any(value == item.value for item in cls)) class", "SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER =", "= 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number'", "'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject'", "= 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING = 'string'", "'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER", "class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING = 'string' BOOLEAN =", "Enum class ParameterLocation(Enum): PATH = 'path' QUERY = 'query' HEADER = 'header' COOKIE", "return (any(value == item.value for item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix'", "'object' class SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64 = 'int64' FLOAT", "= 'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE = 'double' BYTE =", "= 'query' HEADER = 'header' COOKIE = 'cookie' @classmethod def has_value(cls, value): return", "in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM = 'form'", "class SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64 = 'int64' FLOAT =", "'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited'", "= 'int64' FLOAT = 'float' DOUBLE = 'double' BYTE = 
'byte' BINARY =", "= 'path' QUERY = 'query' HEADER = 'header' COOKIE = 'cookie' @classmethod def", "from enum import Enum class ParameterLocation(Enum): PATH = 'path' QUERY = 'query' HEADER", "= 'byte' BINARY = 'binary' DATE = 'date' DATETIME = 'date-time' PASSWORD =", "ParameterLocation(Enum): PATH = 'path' QUERY = 'query' HEADER = 'header' COOKIE = 'cookie'", "enum import Enum class ParameterLocation(Enum): PATH = 'path' QUERY = 'query' HEADER =", "HEADER = 'header' COOKIE = 'cookie' @classmethod def has_value(cls, value): return (any(value ==", "for item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM", "FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT", "FLOAT = 'float' DOUBLE = 'double' BYTE = 'byte' BINARY = 'binary' DATE", "= 'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum):", "= 'object' class SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64 = 'int64'", "def has_value(cls, value): return (any(value == item.value for item in cls)) class ParameterStyle(Enum):", "'cookie' @classmethod def has_value(cls, value): return (any(value == item.value for item in cls))", "'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING = 'string' BOOLEAN", "'array' OBJECT = 'object' class SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64", "'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER", "= 'integer' NUMBER = 'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY =", "'matrix' LABEL = 'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited'", "BOOLEAN = 'boolean' ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum): NONE =", "'query' HEADER = 'header' COOKIE = 'cookie' @classmethod def has_value(cls, value): return (any(value", "INT64 = 'int64' FLOAT = 'float' DOUBLE = 'double' BYTE = 'byte' BINARY", 
"item.value for item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label'", "'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE = 'double' BYTE = 'byte'", "'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING", "BYTE = 'byte' BINARY = 'binary' DATE = 'date' DATETIME = 'date-time' PASSWORD", "= 'matrix' LABEL = 'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED =", "== item.value for item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL =", "PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER =", "= 'cookie' @classmethod def has_value(cls, value): return (any(value == item.value for item in" ]
[ "as transforms import matplotlib.pyplot as plt import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium", "img / 2 + 0.5 # unnormalize npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2,", "shuffle=False, num_workers=2) # function to show an image def imshow(img): img = img", "import matplotlib.pyplot as plt import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer", "to show an image def imshow(img): img = img / 2 + 0.5", "return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data',", "0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform)", "train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show an", "import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer 
keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform():", "# function to show an image def imshow(img): img = img / 2", "download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset", "an image def imshow(img): img = img / 2 + 0.5 # unnormalize", "matplotlib.pyplot as plt import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup", "function to show an image def imshow(img): img = img / 2 +", "0.5), 
(0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True,", "batch_size=4, shuffle=False, num_workers=2) # function to show an image def imshow(img): img =", "return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show an image def imshow(img):", "import torchvision import torchvision.transforms as transforms import matplotlib.pyplot as plt import numpy as", "imshow(img): img = img / 2 + 0.5 # unnormalize npimg = img.numpy()", "train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform()", "def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset,", "numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return", "torchvision.transforms as transforms import 
matplotlib.pyplot as plt import numpy as np classes =", "keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])", "as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose(", "0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset = 
torchvision.datasets.CIFAR100(root='./data',", "img = img / 2 + 0.5 # unnormalize npimg = img.numpy() plt.imshow(np.transpose(npimg,", "[transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset", "trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader():", "0.5))]) def get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return", "np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(),", "download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show an image", "('beaver','dolphin','otter','seal','whale','aquarium 
fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5),", "num_workers=2) def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return", "show an image def imshow(img): img = img / 2 + 0.5 #", "truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def", "= img / 2 + 0.5 # unnormalize npimg = img.numpy() plt.imshow(np.transpose(npimg, (1,", "batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True,", "image def imshow(img): img = img / 2 + 0.5 # unnormalize npimg", "import torch import torchvision import torchvision.transforms as transforms import matplotlib.pyplot as plt import", "= _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, 
transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2)", "transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform()", "def get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset,", "(0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True,", "def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader():", "classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5,", "transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return 
torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True,", "_get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform", "transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset =", "get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4,", "return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform =", "_get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) #", "transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset =", "num_workers=2) # function to show an image def imshow(img): img = img /", "torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False,", "torchvision import torchvision.transforms as transforms import matplotlib.pyplot as plt import numpy as np", "= torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to", "get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4,", "= torchvision.datasets.CIFAR100(root='./data', 
train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform", "torch import torchvision import torchvision.transforms as transforms import matplotlib.pyplot as plt import numpy", "torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show an image def imshow(img): img", "torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show", "shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform)", "= ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5,", 
"fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5,", "torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform =", "/ 2 + 0.5 # unnormalize npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0)))", "transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False,", "peppers','clock','computer 
keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5,", "transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show an image def", "testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function", "import torchvision.transforms as transforms import matplotlib.pyplot as plt import numpy as np classes", "= _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2)", "plt import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer 
keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def", "def imshow(img): img = img / 2 + 0.5 # unnormalize npimg =", "_get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def", "as plt import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor')", "transforms import matplotlib.pyplot as plt import numpy as np classes = 
('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet" ]
[ "nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0))", "1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1]))", "unittest import main, TestCase from grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal):", "self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def", "rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def", "\"\"\" from copy import copy from unittest import main, TestCase from grammpy.old_api import", "= Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def", "test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g =", "class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g =", "grammpy \"\"\" from copy import copy from unittest import main, TestCase from grammpy.old_api", "from grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule", "def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A)) if __name__ == '__main__': main()", "def 
test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g", "of grammpy \"\"\" from copy import copy from unittest import main, TestCase from", "Part of grammpy \"\"\" from copy import copy from unittest import main, TestCase", "B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'):", "class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B]) class", "([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self):", "16.08.2017 19:16 :Licence MIT Part of grammpy \"\"\" from copy import copy from", "g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB))", "copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A)) if", "pass class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName)", "= copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm()", "g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) 
self.assertTrue(self.g.start_is(A))", "self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A)", "self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g)", "start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self):", "class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g", "grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule =", "pass class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def", "= copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A))", ":Created 16.08.2017 19:16 :Licence MIT Part of grammpy \"\"\" from copy import copy", "* class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B])", "<NAME> :Created 16.08.2017 19:16 :Licence MIT Part of grammpy \"\"\" from copy import", "g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g)", "= ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def", "def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1],", "__init__(self, 
methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A,", "B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1))", "g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A)) if __name__", "self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g =", "class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self,", "def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g", "self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A))", "A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase):", "test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g =", "Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() 
self.assertTrue(self.g.have_term([0,", "from unittest import main, TestCase from grammpy.old_api import * class A(Nonterminal): pass class", "setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g =", "g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g)", "<reponame>PatrikValkovic/grammpy<gh_stars>1-10 #!/usr/bin/env python \"\"\" :Author <NAME> :Created 16.08.2017 19:16 :Licence MIT Part of", "GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0,", "= copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule()", "copy import copy from unittest import main, TestCase from grammpy.old_api import * class", "def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g)", "copy from unittest import main, TestCase from grammpy.old_api import * class A(Nonterminal): pass", "self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self):", "methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B],", ":Licence MIT Part of grammpy \"\"\" from copy import copy from unittest import", "1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) 
def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B))", "[B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g", "= Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term()", "g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B]))", "rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar()", "import main, TestCase from grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal): pass", "from copy import copy from unittest import main, TestCase from grammpy.old_api import *", "import copy from unittest import main, TestCase from grammpy.old_api import * class A(Nonterminal):", "python \"\"\" :Author <NAME> :Created 16.08.2017 19:16 :Licence MIT Part of grammpy \"\"\"", "self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A)) if __name__ == '__main__':", ":Author <NAME> :Created 16.08.2017 19:16 :Licence MIT Part of grammpy \"\"\" from copy", "\"\"\" :Author <NAME> :Created 16.08.2017 19:16 :Licence MIT Part of grammpy \"\"\" from", "import * class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A],", "copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) 
g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB))", "main, TestCase from grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal): pass class", "B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self):", "self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A)) if __name__ ==", "#!/usr/bin/env python \"\"\" :Author <NAME> :Created 16.08.2017 19:16 :Licence MIT Part of grammpy", "copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A,", "RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g =", "self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def", "TestCase from grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule):", "test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None)", "self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) 
self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g", "19:16 :Licence MIT Part of grammpy \"\"\" from copy import copy from unittest", "super().__init__(methodName) self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB],", "MIT Part of grammpy \"\"\" from copy import copy from unittest import main,", "def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g", "Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self):" ]
[ "the user info retriever classes, i.e., classes that fetches from the relative APIs", "info retriever classes, i.e., classes that fetches from the relative APIs the information", "retriever classes, i.e., classes that fetches from the relative APIs the information about", "of the user info retriever classes, i.e., classes that fetches from the relative", "implementation of the user info retriever classes, i.e., classes that fetches from the", "the definition and implementation of the user info retriever classes, i.e., classes that", "definition and implementation of the user info retriever classes, i.e., classes that fetches", "package contains the definition and implementation of the user info retriever classes, i.e.,", "\"\"\"This package contains the definition and implementation of the user info retriever classes,", "classes, i.e., classes that fetches from the relative APIs the information about an", "i.e., classes that fetches from the relative APIs the information about an account\"\"\"", "user info retriever classes, i.e., classes that fetches from the relative APIs the", "and implementation of the user info retriever classes, i.e., classes that fetches from", "contains the definition and implementation of the user info retriever classes, i.e., classes", "<reponame>herrBez/Nduja<gh_stars>1-10 \"\"\"This package contains the definition and implementation of the user info retriever" ]
[ "= newFfs.token print(\"Token: \" + tk) print(\"Using the new FFS token to request", "\"r\") as f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig = client.ffs.default_config(tk) print(\"Updated default", "tk) print(\"Using the new FFS token to request the default config:\") defaultConfig =", "client.ffs.create() tk = newFfs.token print(\"Token: \" + tk) print(\"Using the new FFS token", "PowerGateClient client = PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs = client.ffs.create() tk", "PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs = client.ffs.create() tk = newFfs.token print(\"Token:", "request the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with", "FFS:\") newFfs = client.ffs.create() tk = newFfs.token print(\"Token: \" + tk) print(\"Using the", "as f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig = client.ffs.default_config(tk) print(\"Updated default config:\")", "pygate_grpc.client import PowerGateClient client = PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs =", "token to request the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default", "new FFS:\") newFfs = client.ffs.create() tk = newFfs.token print(\"Token: \" + tk) print(\"Using", "a new FFS:\") newFfs = client.ffs.create() tk = newFfs.token print(\"Token: \" + tk)", "tk = newFfs.token print(\"Token: \" + tk) print(\"Using the new FFS token to", "import PowerGateClient client = PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs = client.ffs.create()", "<filename>examples/ffs_config.py from pygate_grpc.client import PowerGateClient client = PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\")", "print(\"Creating a new FFS:\") newFfs = client.ffs.create() tk 
= newFfs.token print(\"Token: \" +", "with open(\"cidconfig.json\", \"r\") as f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig = client.ffs.default_config(tk)", "config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\") as", "default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\")", "= client.ffs.create() tk = newFfs.token print(\"Token: \" + tk) print(\"Using the new FFS", "from pygate_grpc.client import PowerGateClient client = PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs", "f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig = client.ffs.default_config(tk) print(\"Updated default config:\") print(defaultConfig)", "print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\") as f: config = f.read() client.ffs.set_default_config(config,", "defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\") as f:", "= client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\") as f: config", "False) print(\"Creating a new FFS:\") newFfs = client.ffs.create() tk = newFfs.token print(\"Token: \"", "\" + tk) print(\"Using the new FFS token to request the default config:\")", "new FFS token to request the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading", "newFfs.token print(\"Token: \" + tk) print(\"Using the new FFS token to request the", "client = PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs = client.ffs.create() tk =", "config...\") with open(\"cidconfig.json\", \"r\") as f: config = f.read() 
client.ffs.set_default_config(config, tk) defaultConfig =", "the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\",", "default config...\") with open(\"cidconfig.json\", \"r\") as f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig", "FFS token to request the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new", "to request the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\")", "new default config...\") with open(\"cidconfig.json\", \"r\") as f: config = f.read() client.ffs.set_default_config(config, tk)", "print(\"Using the new FFS token to request the default config:\") defaultConfig = client.ffs.default_config(tk)", "+ tk) print(\"Using the new FFS token to request the default config:\") defaultConfig", "= PowerGateClient(\"127.0.0.1:5002\", False) print(\"Creating a new FFS:\") newFfs = client.ffs.create() tk = newFfs.token", "the new FFS token to request the default config:\") defaultConfig = client.ffs.default_config(tk) print(defaultConfig)", "open(\"cidconfig.json\", \"r\") as f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig = client.ffs.default_config(tk) print(\"Updated", "newFfs = client.ffs.create() tk = newFfs.token print(\"Token: \" + tk) print(\"Using the new", "client.ffs.default_config(tk) print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\") as f: config =", "print(defaultConfig) print(\"Loading new default config...\") with open(\"cidconfig.json\", \"r\") as f: config = f.read()", "print(\"Token: \" + tk) print(\"Using the new FFS token to request the default" ]
[ "\"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance", "categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper =", "title_field = layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field)", "mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES from", "crispy_forms import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category", "= self.request.user if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField(", "%}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field", ") rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args,", "choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field =", "self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category", "css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = \"POST\" self.helper.layout = layout.Layout(", "categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions =", "import get_user_model from crispy_forms 
import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from", "layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field =", "layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field,", "= bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper =", "TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES from ..core.form_fields import", "layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"),", ") picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field =", "_(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" )", "= layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions", "layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method", "myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User", "layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = 
layout.Submit(\"filter\", _(\"Filter\")) actions =", "= layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action = self.request.path", "def save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user if commit: instance.save() self.save_m2m()", "picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field(", "= layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset", "main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance = super().save(commit=False) instance.author =", "__init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field =", "title_field, content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" )", "Category from .models import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User = get_user_model()", "format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset =", ") def save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user if commit: instance.save()", "*args, **kwargs): self.request = request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field = layout.Field(\"content\",", "def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") 
category_field = layout.Field(\"category\") rating_field", "from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import", "label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta: model = Idea exclude = [\"author\"] def", "save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user if commit: instance.save() self.save_m2m() return", "include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\",", "queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField(", "super().save(commit=False) instance.author = self.request.user if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author", "from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES", "import ugettext_lazy as _ from django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper,", "= layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset(", "= get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta:", ") submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action", "self.request = request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field = 
layout.Field(\"content\", rows=\"3\") main_fieldset", "Meta: model = Idea exclude = [\"author\"] def __init__(self, request, *args, **kwargs): self.request", "actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = \"POST\"", "import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories", "= layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance = super().save(commit=False)", "required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self,", "Idea exclude = [\"author\"] def __init__(self, request, *args, **kwargs): self.request = request super().__init__(*args,", "super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button =", "MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), )", "category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method = \"GET\" self.helper.layout = layout.Layout(main_fieldset)", "from django import forms from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as", "django import forms from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _", "request, *args, **kwargs): self.request = request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field =", "**kwargs) author_field = layout.Field(\"author\") category_field = 
layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\",", "instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField(", "author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(),", "django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model from crispy_forms import bootstrap,", "instance = super().save(commit=False) instance.author = self.request.user if commit: instance.save() self.save_m2m() return instance class", "forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") )", "\"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\",", "label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def", "= layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include", "..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False,", "= layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) 
picture_field", "rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML(", "categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" )", "exclude = [\"author\"] def __init__(self, request, *args, **kwargs): self.request = request super().__init__(*args, **kwargs)", "css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field,", "get_user_model from crispy_forms import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models", "django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model", "**kwargs) title_field = layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field,", "helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import", "author_field = layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\"))", "= forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\")", "= MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta: model = Idea exclude =", "= [\"author\"] def __init__(self, request, *args, **kwargs): 
self.request = request super().__init__(*args, **kwargs) title_field", "_(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, )", "instance.author = self.request.user if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author =", "label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating", "self.request.path self.helper.form_method = \"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def", "_(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method = \"GET\" self.helper.layout", "= \"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True):", "main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method", "from django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper, layout from mptt.forms import", "get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta: model", "label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\")", "format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field,", "self.helper.form_action = self.request.path self.helper.form_method = \"POST\" 
self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions,", "= self.request.path self.helper.form_method = \"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, )", "MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta: model = Idea exclude = [\"author\"]", "import forms from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ from", "picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset =", "from myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField", "submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field,", ") categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions", "layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset =", "def __init__(self, request, *args, **kwargs): self.request = request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\")", "layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance = super().save(commit=False) instance.author", "django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField", "super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = 
layout.Fieldset(_(\"Main data\"),", "= layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\",", "= layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button,", "instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), )", "= forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field", ") category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"),", "if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False,", "main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{%", "TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES )", "bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models", "content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\")", "= layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( 
_(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button", "self.request.user if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"),", "author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method = \"GET\" self.helper.layout =", "= request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset =", "submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action =", "self.helper.form_method = \"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self,", "mark_safe from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model from crispy_forms", "= layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html", "actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper", "template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\"))", "forms from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ from django.contrib.auth", ".models import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm):", ") class Meta: model = Idea exclude = [\"author\"] def __init__(self, request, *args,", "\"\"\"{% include 
\"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"),", "forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field =", "picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset", "from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model from crispy_forms import", "helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = \"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset,", "= helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = \"POST\" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset,", "categories_fieldset, actions, ) def save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user if", "category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button)", "commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(),", "as _ from django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper, layout from", "layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html =", "from crispy_forms import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models 
import", "= layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method =", "<reponame>PacktPublishing/Django-3-Web-Development-Cookbook from django import forms from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy", "= layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field,", "= Idea exclude = [\"author\"] def __init__(self, request, *args, **kwargs): self.request = request", "= layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html,", "from ..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"),", "commit=True): instance = super().save(commit=False) instance.author = self.request.user if commit: instance.save() self.save_m2m() return instance", "required=False, queryset=Category.objects.all(), ) class Meta: model = Idea exclude = [\"author\"] def __init__(self,", "import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES from ..core.form_fields", ") categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\"", "rating_field = layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset(", "_ from django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper, layout from mptt.forms", "from .models import Idea, RATING_CHOICES from ..core.form_fields import 
MultipleChoiceTreeField User = get_user_model() class", "IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta: model = Idea", "= TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES", "rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs)", "actions, ) def save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user if commit:", "**kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\") submit_button", "_(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper", ") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field = layout.Field(\"category\")", "class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"),", "level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs):", "data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\"", "\"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, 
format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", )", "layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\"", "= bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = \"POST\" self.helper.layout", "content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset", "layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button =", "Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories =", "**kwargs): self.request = request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\")", "[\"author\"] def __init__(self, request, *args, **kwargs): self.request = request super().__init__(*args, **kwargs) title_field =", "layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import Idea,", "css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper()", "title=_(\"Image upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset(", "class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, 
queryset=Category.objects.all(), ) class Meta: model =", "categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class Meta: model = Idea exclude", "upload\"), css_id=\"picture_fieldset\", ) categories_field = layout.Field( \"categories\", template=\"core/includes/checkboxselectmultiple_tree.html\" ) categories_fieldset = layout.Fieldset( _(\"Categories\"),", "import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(),", "*args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field = layout.Field(\"category\") rating_field = layout.Field(\"rating\")", "import mark_safe from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model from", "request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field = layout.Field(\"content\", rows=\"3\") main_fieldset = layout.Fieldset(_(\"Main", "self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = \"POST\" self.helper.layout = layout.Layout( main_fieldset,", "queryset=Category.objects.all(), ) class Meta: model = Idea exclude = [\"author\"] def __init__(self, request,", "layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method = \"GET\"", "= layout.Field(\"rating\") submit_button = layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"),", "required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field(\"author\") category_field", "bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() 
self.helper.form_action = self.request.path self.helper.form_method = \"POST\" self.helper.layout =", "User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_(\"Categories\"), required=False, queryset=Category.objects.all(), ) class", "__init__(self, request, *args, **kwargs): self.request = request super().__init__(*args, **kwargs) title_field = layout.Field(\"title\") content_field", "layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\" %}\"\"\" ) picture_fieldset = layout.Fieldset( _(\"Picture\"), picture_field, format_html, title=_(\"Image", "picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user", "_(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\") self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method =", "RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField(", "layout.Fieldset( _(\"Categories\"), categories_field, css_id=\"categories_fieldset\" ) submit_button = layout.Submit(\"save\", _(\"Save\")) actions = bootstrap.FormActions(submit_button, css_class=\"my-4\")", "return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category =", "class Meta: model = Idea exclude = [\"author\"] def __init__(self, request, *args, **kwargs):", "self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance =", "IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_(\"Author\"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False,", "= super().save(commit=False) instance.author = 
self.request.user if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form):", "required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating =", "layout.Submit(\"filter\", _(\"Filter\")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions,", "import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from", "model = Idea exclude = [\"author\"] def __init__(self, request, *args, **kwargs): self.request =", "ugettext_lazy as _ from django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper, layout", "import Category from .models import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User =", "bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _(\"Filter\"), author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper()", "category = TreeNodeChoiceField( label=_(\"Category\"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False,", "layout.Fieldset(_(\"Main data\"), title_field, content_field) picture_field = layout.Field(\"picture\") format_html = layout.HTML( \"\"\"{% include \"ideas1/includes/picture_guidelines.html\"", "queryset=Category.objects.all(), level_indicator=mark_safe(\"&nbsp;&nbsp;&nbsp;&nbsp;\") ) rating = forms.ChoiceField( label=_(\"Rating\"), required=False, choices=RATING_CHOICES ) def __init__(self, *args," ]